diff --git "a/attnserver.run_attnserver.slurm.sh.343213.err.log" "b/attnserver.run_attnserver.slurm.sh.343213.err.log" new file mode 100644--- /dev/null +++ "b/attnserver.run_attnserver.slurm.sh.343213.err.log" @@ -0,0 +1,36574 @@ ++ source /mnt/weka/home/hao.zhang/conda/miniconda/bin/activate +++ _CONDA_ROOT=/mnt/weka/home/hao.zhang/conda/miniconda +++ . /mnt/weka/home/hao.zhang/conda/miniconda/etc/profile.d/conda.sh ++++ export CONDA_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda ++++ CONDA_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda ++++ export _CE_M= ++++ _CE_M= ++++ export _CE_CONDA= ++++ _CE_CONDA= ++++ export CONDA_PYTHON_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/python ++++ CONDA_PYTHON_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/python ++++ '[' -z x ']' +++ conda activate +++ local cmd=activate +++ case "$cmd" in +++ __conda_activate activate +++ '[' -n '' ']' +++ local ask_conda ++++ PS1= ++++ __conda_exe shell.posix activate ++++ '[' -n '' ']' ++++ /mnt/weka/home/hao.zhang/conda/miniconda/bin/conda shell.posix activate +++ ask_conda='unset _CE_M +unset _CE_CONDA +PS1='\''(base) '\'' +export PATH='\''/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/conda/miniconda/bin:/mnt/weka/home/hao.zhang/conda/miniconda/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin'\'' +export CONDA_SHLVL='\''1'\'' +export CONDA_PROMPT_MODIFIER='\''(base) '\'' +export CONDA_EXE='\''/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda'\'' +export CONDA_PYTHON_EXE='\''/mnt/weka/home/hao.zhang/conda/miniconda/bin/python'\''' +++ eval 'unset _CE_M +unset _CE_CONDA +PS1='\''(base) '\'' +export PATH='\''/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/conda/miniconda/bin:/mnt/weka/home/hao.zhang/conda/miniconda/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin'\'' +export CONDA_SHLVL='\''1'\'' +export CONDA_PROMPT_MODIFIER='\''(base) '\'' +export CONDA_EXE='\''/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda'\'' +export CONDA_PYTHON_EXE='\''/mnt/weka/home/hao.zhang/conda/miniconda/bin/python'\''' ++++ unset _CE_M ++++ unset _CE_CONDA ++++ PS1='(base) ' ++++ export PATH=/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/conda/miniconda/bin:/mnt/weka/home/hao.zhang/conda/miniconda/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin ++++ PATH=/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/conda/miniconda/bin:/mnt/weka/home/hao.zhang/conda/miniconda/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin ++++ export CONDA_SHLVL=1 ++++ CONDA_SHLVL=1 ++++ export 'CONDA_PROMPT_MODIFIER=(base) ' ++++ CONDA_PROMPT_MODIFIER='(base) ' ++++ export CONDA_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda ++++ CONDA_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda ++++ export CONDA_PYTHON_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/python ++++ CONDA_PYTHON_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/python +++ __conda_hashr +++ '[' -n '' ']' +++ '[' -n '' ']' +++ hash -r ++ conda activate junda-attnserver ++ local 
cmd=activate ++ case "$cmd" in ++ __conda_activate activate junda-attnserver ++ '[' -n '' ']' ++ local ask_conda +++ PS1='(base) ' +++ __conda_exe shell.posix activate junda-attnserver +++ '[' -n '' ']' +++ /mnt/weka/home/hao.zhang/conda/miniconda/bin/conda shell.posix activate junda-attnserver ++ ask_conda='unset _CE_M +unset _CE_CONDA +PS1='\''(junda-attnserver) '\'' +export PATH='\''/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin:/mnt/weka/home/hao.zhang/conda/miniconda/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin'\'' +export CONDA_PREFIX='\''/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver'\'' +export CONDA_SHLVL='\''2'\'' +export CONDA_DEFAULT_ENV='\''junda-attnserver'\'' +export CONDA_PROMPT_MODIFIER='\''(junda-attnserver) '\'' +export CONDA_PREFIX_1='\''/mnt/weka/home/hao.zhang/conda/miniconda'\'' +export CONDA_EXE='\''/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda'\'' +export CONDA_PYTHON_EXE='\''/mnt/weka/home/hao.zhang/conda/miniconda/bin/python'\''' ++ eval 'unset _CE_M +unset _CE_CONDA +PS1='\''(junda-attnserver) '\'' +export PATH='\''/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin:/mnt/weka/home/hao.zhang/conda/miniconda/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin'\'' +export CONDA_PREFIX='\''/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver'\'' +export CONDA_SHLVL='\''2'\'' +export CONDA_DEFAULT_ENV='\''junda-attnserver'\'' +export CONDA_PROMPT_MODIFIER='\''(junda-attnserver) '\'' +export CONDA_PREFIX_1='\''/mnt/weka/home/hao.zhang/conda/miniconda'\'' +export CONDA_EXE='\''/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda'\'' +export CONDA_PYTHON_EXE='\''/mnt/weka/home/hao.zhang/conda/miniconda/bin/python'\''' +++ unset _CE_M +++ unset _CE_CONDA +++ PS1='(junda-attnserver) ' +++ export PATH=/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin:/mnt/weka/home/hao.zhang/conda/miniconda/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin +++ PATH=/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/.local/bin:/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin:/mnt/weka/home/hao.zhang/conda/miniconda/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin +++ export CONDA_PREFIX=/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver +++ CONDA_PREFIX=/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver +++ export CONDA_SHLVL=2 +++ CONDA_SHLVL=2 +++ export CONDA_DEFAULT_ENV=junda-attnserver +++ CONDA_DEFAULT_ENV=junda-attnserver +++ export 'CONDA_PROMPT_MODIFIER=(junda-attnserver) ' +++ CONDA_PROMPT_MODIFIER='(junda-attnserver) ' +++ export CONDA_PREFIX_1=/mnt/weka/home/hao.zhang/conda/miniconda +++ CONDA_PREFIX_1=/mnt/weka/home/hao.zhang/conda/miniconda +++ export CONDA_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda +++ CONDA_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/conda +++ export CONDA_PYTHON_EXE=/mnt/weka/home/hao.zhang/conda/miniconda/bin/python +++ 
++ __conda_hashr
++ '[' -n '' ']'
++ '[' -n '' ']'
++ hash -r
++ export CHROME_TRACE_PREFIX=/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5
++ CHROME_TRACE_PREFIX=/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5
++ mkdir -p /mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5
++ export PROF_TP_SIZE=4
++ PROF_TP_SIZE=4
++ export PROF_CP_SIZE=8
++ PROF_CP_SIZE=8
++ export PROF_BS=1
++ PROF_BS=1
++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
++ export PROF_CTX_LENGTH=1024
++ PROF_CTX_LENGTH=1024
++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L1024*tp4.cp8.bs1.json'
++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L1024*tp4.cp8.bs1.json' ']'
++ echo 'Running ctx_length=1024, TP_SIZE=4, CP_SIZE=8, BATCH_SIZE=1'
++ srun bash ./attnserver.sh
++ which python3
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 1024 --max-position-embeddings 1024 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 1024 --max-position-embeddings 1024 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 1024 --max-position-embeddings 1024 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 1024 --max-position-embeddings 1024 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:23:32.040000 150795 site-packages/torch/distributed/run.py:766]
+W0621 21:23:32.040000 150795 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:23:32.040000 150795 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:23:32.040000 150795 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:23:32.139000 2069790 site-packages/torch/distributed/run.py:766]
+W0621 21:23:32.139000 2069790 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:23:32.139000 2069790 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:23:32.139000 2069790 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:23:32.270000 3439287 site-packages/torch/distributed/run.py:766]
+W0621 21:23:32.270000 3439287 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:23:32.270000 3439287 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:23:32.270000 3439287 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:23:32.276000 3370463 site-packages/torch/distributed/run.py:766]
+W0621 21:23:32.276000 3370463 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:23:32.276000 3370463 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:23:32.276000 3370463 site-packages/torch/distributed/run.py:766] *****************************************
+[rank3]:[W621 21:23:54.937418488 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 3] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank7]:[W621 21:23:54.938038365 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 7] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank1]:[W621 21:23:54.938418575 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 1] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank27]:[W621 21:23:54.347054762 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 27] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank31]:[W621 21:23:54.347056923 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 31] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank5]:[W621 21:23:54.939036963 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 5] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank9]:[W621 21:23:54.827861786 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 9] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank19]:[W621 21:23:54.280409857 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 19] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank17]:[W621 21:23:54.281332808 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 17] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank13]:[W621 21:23:54.828626965 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 13] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank6]:[W621 21:23:54.945331266 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 6] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank15]:[W621 21:23:54.829689328 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 15] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank11]:[W621 21:23:54.829702555 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 11] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank29]:[W621 21:23:54.354101755 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 29] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank21]:[W621 21:23:54.281936851 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 21] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank25]:[W621 21:23:54.354498893 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 25] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank2]:[W621 21:23:54.945868403 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 2] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank4]:[W621 21:23:54.948810866 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 4] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank14]:[W621 21:23:54.836821806 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 14] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank23]:[W621 21:23:54.289761017 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 23] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank10]:[W621 21:23:54.836878904 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 10] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank12]:[W621 21:23:54.837529662 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 12] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank30]:[W621 21:23:54.362376084 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 30] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank28]:[W621 21:23:54.362434320 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 28] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank26]:[W621 21:23:54.362493142 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 26] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank20]:[W621 21:23:54.296067739 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 20] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank18]:[W621 21:23:54.296067731 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 18] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank22]:[W621 21:23:54.296186374 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 22] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank24]:[W621 21:23:55.447548457 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 24] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank16]:[W621 21:23:55.382025691 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 16] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank8]:[W621 21:23:55.929476421 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 8] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank0]:[W621 21:23:55.072209273 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 0] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
+  warnings.warn(
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+  warnings.warn(
+[rank2]:[W621 21:24:27.061921547 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank0]:[W621 21:24:27.099100418 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank3]:[W621 21:24:27.126701797 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank1]:[W621 21:24:27.186205203 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank30]:[W621 21:24:27.838345641 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank25]:[W621 21:24:27.840716682 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank11]:[W621 21:24:27.347506688 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank9]:[W621 21:24:27.349813115 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank27]:[W621 21:24:27.878723249 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank8]:[W621 21:24:27.366021066 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank4]:[W621 21:24:27.499116447 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank28]:[W621 21:24:27.907338298 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank5]:[W621 21:24:27.503241294 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank7]:[W621 21:24:27.506926596 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank6]:[W621 21:24:27.510592951 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank14]:[W621 21:24:27.397627067 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank18]:[W621 21:24:27.901037208 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank21]:[W621 21:24:27.963835026 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank23]:[W621 21:24:27.967580915 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank22]:[W621 21:24:27.152955431 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank26]:[W621 21:24:27.221705617 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank15]:[W621 21:24:27.706196362 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank10]:[W621 21:24:27.710554590 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank13]:[W621 21:24:27.712949786 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank16]:[W621 21:24:27.191226910 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank29]:[W621 21:24:27.294822529 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank31]:[W621 21:24:27.328556875 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank17]:[W621 21:24:27.276832090 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank20]:[W621 21:24:27.281809920 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank12]:[W621 21:24:27.829038905 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank24]:[W621 21:24:27.350482504 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank19]:[W621 21:24:27.300469705 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
++ set +x
++ set +x
++ set +x
++ set +x
++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
++ export PROF_CTX_LENGTH=2048
++ PROF_CTX_LENGTH=2048
++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L2048*tp4.cp8.bs1.json'
++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L2048*tp4.cp8.bs1.json' ']'
++ echo 'Running ctx_length=2048, TP_SIZE=4, CP_SIZE=8, BATCH_SIZE=1'
++ srun bash ./attnserver.sh
+rm: cannot remove 'gpt-checkpoint/iter_0000010': Directory not empty
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 2048 --max-position-embeddings 2048 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 2048 --max-position-embeddings 2048 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 2048 --max-position-embeddings 2048 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 2048 --max-position-embeddings 2048 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:24:33.569000 154304 site-packages/torch/distributed/run.py:766]
+W0621 21:24:33.569000 154304 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:24:33.569000 154304 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:24:33.569000 154304 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:24:33.770000 3442873 site-packages/torch/distributed/run.py:766]
+W0621 21:24:33.770000 3442873 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:24:33.770000 3442873 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:24:33.770000 3442873 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+[rank5]:[W621 21:24:56.727648097 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 5] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
[... the same warning is emitted by all 32 ranks; each rank N reports GPU N % 8 ...]
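The per-rank NCCL warning also names its own fix: pass device_id to init_process_group() so the rank-to-GPU binding is explicit instead of inferred. A sketch under the assumption of a PyTorch build whose init_process_group accepts device_id (recent releases do), using the rank % 8 layout seen in this log:

```python
import os
import torch
import torch.distributed as dist

local_rank = int(os.environ["LOCAL_RANK"])  # rank N uses GPU N % 8 on its node
device = torch.device(f"cuda:{local_rank}")
torch.cuda.set_device(device)

# Passing the device here silences the "rank to GPU mapping" warning and
# avoids the potential hang it describes.
dist.init_process_group(backend="nccl", device_id=device)
```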
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
+ warnings.warn(
[... identical UserWarning repeated once per rank ...]
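Until the deprecated call sites are updated, the fp8 warning above (and the offload_weights DeprecationWarning that follows) repeats once per rank. A standard-library sketch that filters both, with message prefixes copied from this log; it must run before the imports that trigger the warnings:

```python
import warnings

# Silence the two known deprecation warnings that every rank re-emits.
# Install the filters before importing megatron / transformer_engine so
# they are in place when the warnings fire.
warnings.filterwarnings(
    "ignore",
    message=r'The fp8 argument in "get_gpt_layer_with_transformer_engine_spec".*',
    category=UserWarning,
)
warnings.filterwarnings(
    "ignore",
    message=r"Offloading weights is deprecated.*",
    category=DeprecationWarning,
)
```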
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+ warnings.warn(
[... identical DeprecationWarning repeated once per rank ...]
+[rank1]:[W621 21:25:30.163774475 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
[... the same shutdown warning is emitted by all 32 ranks ...]
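The shutdown warning is likewise actionable: destroy the process group before the program exits. A minimal, illustrative sketch of the teardown placement (not Megatron's actual structure):

```python
import torch.distributed as dist

def run_training() -> None:
    ...  # placeholder for the actual train loop

def main() -> None:
    dist.init_process_group(backend="nccl")
    try:
        run_training()
    finally:
        # Explicit teardown frees NCCL communicators deterministically and
        # silences the per-rank shutdown warning above.
        if dist.is_initialized():
            dist.destroy_process_group()
```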
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
++ set +x
++ set +x
++ set +x
++ set +x
++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
++ export PROF_CTX_LENGTH=4096
++ PROF_CTX_LENGTH=4096
++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L4096*tp4.cp8.bs1.json'
++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L4096*tp4.cp8.bs1.json' ']'
++ echo 'Running ctx_length=4096, TP_SIZE=4, CP_SIZE=8, BATCH_SIZE=1'
++ srun bash ./attnserver.sh
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 4096 --max-position-embeddings 4096 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
[... the same command is launched with --node_rank 2, 0, and 3; only --node_rank differs ...]
[... the torch.distributed.launch FutureWarning and OMP_NUM_THREADS banners repeat as in the previous iteration (launcher PIDs 157797, 3446512, 3377596, 2076677) ...]
+[rank3]:[W621 21:26:00.465424534 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 3] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
[... same per-rank GPU-mapping warning from all 32 ranks, as in the previous iteration ...]
+[rank27]:[W621 21:26:00.875454847 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 27] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank19]:[W621 21:26:00.811624868 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 19] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank23]:[W621 21:26:00.811943175 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 23] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank4]:[W621 21:26:00.723927154 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 4] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank6]:[W621 21:26:00.724214370 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 6] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank26]:[W621 21:26:00.133332173 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 26] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank30]:[W621 21:26:00.133411642 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 30] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank2]:[W621 21:26:00.725296228 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 2] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank28]:[W621 21:26:00.134155316 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 28] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank20]:[W621 21:26:00.066320642 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 20] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank18]:[W621 21:26:00.066461856 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 18] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. 
+[rank5]:[W621 21:26:00.729202920 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 5] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank1]:[W621 21:26:00.729999769 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 1] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank25]:[W621 21:26:00.140614672 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 25] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank29]:[W621 21:26:00.140647593 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 29] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank21]:[W621 21:26:00.073602707 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 21] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank17]:[W621 21:26:00.073625240 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 17] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank22]:[W621 21:26:00.073978023 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 22] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank14]:[W621 21:26:00.624506198 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 14] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank9]:[W621 21:26:00.625357910 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 9] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank13]:[W621 21:26:00.625356850 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 13] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank10]:[W621 21:26:00.625370588 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 10] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. 
+[rank12]:[W621 21:26:00.625459479 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 12] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank16]:[W621 21:26:00.159767466 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 16] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank8]:[W621 21:26:00.707816714 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 8] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank0]:[W621 21:26:00.853497516 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 0] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank24]:[W621 21:26:01.576956837 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 24] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
+ warnings.warn(
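The fp8 deprecation above comes from Megatron-Core's layer-spec helper. A hedged sketch of the updated call, assuming the current Megatron-Core API where the fp8 recipe is configured on TransformerConfig rather than on the spec helper (the fp8 field name is an assumption to verify against your Megatron version):

    from megatron.core.models.gpt.gpt_layer_specs import (
        get_gpt_layer_with_transformer_engine_spec,
    )
    from megatron.core.transformer import TransformerConfig

    # Build the layer spec without the deprecated fp8 argument.
    layer_spec = get_gpt_layer_with_transformer_engine_spec()

    # Assumption: fp8 lives on the transformer config instead
    # (sizes match the 2-layer, hidden-size-4096 run in this log).
    config = TransformerConfig(
        num_layers=2,
        hidden_size=4096,
        num_attention_heads=64,
        fp8="hybrid",
    )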
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+ warnings.warn(
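The Transformer Engine deprecation above says the weight-offload knob is now a no-op. A hedged sketch of requesting activation-only CPU offload via get_cpu_offload_context (argument names assumed from transformer_engine.pytorch.cpu_offload; verify against your TE version):

    from transformer_engine.pytorch import get_cpu_offload_context

    # Request activation offload only; offload_weights=True merely warns now.
    offload_context, sync_before_use = get_cpu_offload_context(
        enabled=True,
        num_layers=1,             # layers whose activations go to CPU
        model_layers=2,           # total transformer layers, as in this run
        offload_activations=True,
        offload_weights=False,    # deprecated; leave it off
    )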
+[rank0]:[W621 21:26:31.497323570 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank2]:[W621 21:26:31.510916393 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank3]:[W621 21:26:31.514888659 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank1]:[W621 21:26:31.593503256 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank5]:[W621 21:26:31.785024114 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[rank19]:[W621 21:26:31.212374320 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources.
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank15]:[W621 21:26:31.800068532 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank14]:[W621 21:26:31.827737547 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank17]:[W621 21:26:31.306483522 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank22]:[W621 21:26:31.310897776 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank8]:[W621 21:26:31.887520723 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank7]:[W621 21:26:32.124959954 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank16]:[W621 21:26:32.542252345 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank6]:[W621 21:26:32.222970292 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank4]:[W621 21:26:32.232846985 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank27]:[W621 21:26:32.644226632 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank25]:[W621 21:26:32.647271633 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank24]:[W621 21:26:32.651330612 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank26]:[W621 21:26:32.651437516 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. 
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank9]:[W621 21:26:32.146444465 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank20]:[W621 21:26:32.683492201 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank23]:[W621 21:26:32.687431023 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank11]:[W621 21:26:32.254147274 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank10]:[W621 21:26:32.291080493 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank21]:[W621 21:26:32.771012386 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank18]:[W621 21:26:32.784298976 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank12]:[W621 21:26:32.341482170 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank30]:[W621 21:26:32.913378202 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank31]:[W621 21:26:32.916673599 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank29]:[W621 21:26:32.950470547 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank28]:[W621 21:26:32.953765601 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) +[rank13]:[W621 21:26:32.439091028 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. 
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator()) ++ set +x ++ set +x ++ set +x ++ set +x ++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072 ++ export PROF_CTX_LENGTH=8192 ++ PROF_CTX_LENGTH=8192 ++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L8192*tp4.cp8.bs1.json' ++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L8192*tp4.cp8.bs1.json' ']' ++ echo 'Running ctx_length=8192, TP_SIZE=4, CP_SIZE=8, BATCH_SIZE=1' ++ srun bash ./attnserver.sh +rm: cannot remove 'gpt-checkpoint/iter_0000010': Directory not empty +rm: cannot remove 'gpt-checkpoint/iter_0000010': Directory not empty ++ which python3 ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 8192 --max-position-embeddings 8192 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 8192 --max-position-embeddings 8192 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 8192 --max-position-embeddings 8192 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 
--group-query-attention --num-query-groups 16 --seq-length 8192 --max-position-embeddings 8192 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+ main()
+W0621 21:26:38.390000 2080035 site-packages/torch/distributed/run.py:766]
+W0621 21:26:38.390000 2080035 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:26:38.390000 2080035 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:26:38.390000 2080035 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:26:38.411000 161212 site-packages/torch/distributed/run.py:766]
+W0621 21:26:38.411000 161212 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:26:38.411000 161212 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:26:38.411000 161212 site-packages/torch/distributed/run.py:766] *****************************************
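The FutureWarning above concerns the launcher itself: python3 -m torch.distributed.launch is deprecated in favor of torchrun, which sets LOCAL_RANK in the environment instead of passing a --local-rank argument. A minimal sketch of the migration for the command used in this log:

    # Launcher side (shell), equivalent to the deprecated command above:
    #   torchrun --nproc_per_node 8 --nnodes 4 --node_rank 0 \
    #       --rdzv_id 343213 --rdzv_backend c10d \
    #       --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py ...
    # Script side: read the local rank from the environment, not from argv.
    import os

    local_rank = int(os.environ["LOCAL_RANK"])
    # torchrun also defaults OMP_NUM_THREADS to 1 (the W0621 notice above);
    # raise it explicitly if data loading or CPU ops need more threads.
    omp_threads = os.environ.get("OMP_NUM_THREADS", "1")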
+W0621 21:26:38.503000 3381085 site-packages/torch/distributed/run.py:766]
+W0621 21:26:38.503000 3381085 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:26:38.503000 3381085 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:26:38.503000 3381085 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:26:38.548000 3450002 site-packages/torch/distributed/run.py:766]
+W0621 21:26:38.548000 3450002 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:26:38.548000 3450002 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:26:38.548000 3450002 site-packages/torch/distributed/run.py:766] *****************************************
+[rank6]:[W621 21:27:01.025969879 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 6] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank2]:[W621 21:27:01.026058175 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 2] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank14]:[W621 21:27:01.915506474 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 14] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank22]:[W621 21:27:01.369113907 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 22] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank26]:[W621 21:27:01.437068397 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 26] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device.
+[rank30]:[W621 21:27:01.437068262 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 30] using GPU 6 as device used by this process is currently unknown.
This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank31]:[W621 21:27:01.437068431 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 31] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank10]:[W621 21:27:01.915552948 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 10] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank18]:[W621 21:27:01.369285508 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 18] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank7]:[W621 21:27:02.033098201 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 7] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank3]:[W621 21:27:02.033251030 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 3] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank1]:[W621 21:27:02.033758666 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 1] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank4]:[W621 21:27:02.033824508 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 4] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank5]:[W621 21:27:02.034205148 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 5] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank29]:[W621 21:27:02.443257365 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 29] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank12]:[W621 21:27:01.923036462 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 12] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank20]:[W621 21:27:02.376243684 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 20] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. 
You can pecify device_id in init_process_group() to force use of a particular device. +[rank11]:[W621 21:27:01.923551187 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 11] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank9]:[W621 21:27:01.923557427 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 9] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank19]:[W621 21:27:02.376354730 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 19] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank17]:[W621 21:27:02.376367297 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 17] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank13]:[W621 21:27:01.923613783 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 13] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank15]:[W621 21:27:01.923665772 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 15] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank21]:[W621 21:27:02.377785041 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 21] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank23]:[W621 21:27:02.379700291 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 23] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank25]:[W621 21:27:02.452806080 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 25] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank28]:[W621 21:27:02.452952297 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 28] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank27]:[W621 21:27:02.453512126 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 27] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. 
You can pecify device_id in init_process_group() to force use of a particular device. +[rank8]:[W621 21:27:02.013124653 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 8] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank16]:[W621 21:27:02.466578762 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 16] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank24]:[W621 21:27:02.543067932 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 24] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +[rank0]:[W621 21:27:02.172071400 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 0] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can pecify device_id in init_process_group() to force use of a particular device. +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. 
Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. 
+ warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. 
Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. 
+ warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. + warnings.warn( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect. 
+[rank1]:[W621 21:27:34.716530631 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
[the same ProcessGroupNCCL.cpp:1476 shutdown warning follows from every other rank (0-31), differing only in timestamp; the remaining 31 lines are omitted]
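PyTorch is pointing out that the job exited without tearing down the default process group. A minimal sketch of the shutdown it asks for; where exactly this belongs inside pretrain_gpt_profile.py is an assumption about that script's structure:

    import torch.distributed as dist

    # Run at the very end of training (e.g. in a finally: block) so NCCL
    # communicators are released instead of leaking at interpreter exit.
    if dist.is_initialized():
        dist.destroy_process_group()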
++ set +x
++ set +x
++ set +x
++ set +x
++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
++ export PROF_CTX_LENGTH=12288
++ PROF_CTX_LENGTH=12288
++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L12288*tp4.cp8.bs1.json'
++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L12288*tp4.cp8.bs1.json' ']'
++ echo 'Running ctx_length=12288, TP_SIZE=4, CP_SIZE=8, BATCH_SIZE=1'
++ srun bash ./attnserver.sh
++ which python3
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 12288 --max-position-embeddings 12288 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 [remaining arguments identical to the node_rank 0 command above]
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 [remaining arguments identical to the node_rank 0 command above]
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 [remaining arguments identical to the node_rank 0 command above]
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+  main()
+W0621 21:27:42.133000 164632 site-packages/torch/distributed/run.py:766]
+W0621 21:27:42.133000 164632 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:27:42.133000 164632 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:27:42.133000 164632 site-packages/torch/distributed/run.py:766] *****************************************
[the same launch.py FutureWarning and OMP_NUM_THREADS notice are emitted by the launchers on the other three nodes (pids 2083398, 3384577, 3453495); the verbatim repeats are omitted]
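The launcher warning above says two things: torch.distributed.launch is deprecated in favor of torchrun, and scripts should read the local rank from the environment rather than from a --local-rank argument. A minimal sketch of the environment-based pattern the warning describes (torchrun exports LOCAL_RANK for every worker):

    import os

    import torch

    # torchrun sets LOCAL_RANK per worker; use it to bind this process
    # to its GPU instead of parsing a --local-rank argument.
    local_rank = int(os.environ["LOCAL_RANK"])
    torch.cuda.set_device(local_rank)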
+[rank28]:[W621 21:28:06.673216429 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 28] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
[all 32 ranks print the same ProcessGroupNCCL.cpp:4715 warning; in every case the reported GPU index equals the rank modulo 8, i.e. the process's local rank on its 8-GPU node, and the remaining 31 near-identical lines are omitted]
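Each rank is warning that the process group cannot tell which GPU the process owns. Per the message, passing device_id to init_process_group removes the ambiguity. The sketch below assumes a torchrun-style launcher that exports LOCAL_RANK; the device_id parameter is available in recent PyTorch releases:

    import os

    import torch
    import torch.distributed as dist

    local_rank = int(os.environ["LOCAL_RANK"])
    # Binding the group to a concrete device avoids the "device used by
    # this process is currently unknown" warning and the potential hang
    # it describes.
    dist.init_process_group(
        backend="nccl",
        device_id=torch.device(f"cuda:{local_rank}"),
    )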
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
+ warnings.warn(
[this UserWarning / warnings.warn( pair repeats verbatim once per rank; the remaining copies are omitted]
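The fp8 argument to get_gpt_layer_with_transformer_engine_spec is deprecated, but the warning does not spell out the replacement. As a hypothetical illustration only (field names not verified against this Megatron checkout), FP8 is normally carried on the TransformerConfig that builds the model rather than on the layer-spec helper:

    # Hypothetical sketch: configure FP8 on the transformer config instead
    # of passing fp8 to the layer-spec helper.  Sizes mirror the job's
    # --num-layers/--hidden-size/--num-attention-heads flags.
    from megatron.core.transformer import TransformerConfig

    config = TransformerConfig(
        num_layers=2,
        hidden_size=4096,
        num_attention_heads=64,
        fp8="hybrid",  # assumption: the value previously passed to the spec helper
    )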
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+ warnings.warn(
[the cpu_offload.py:595 DeprecationWarning appears again at this point, once per rank; the verbatim repeats are omitted]
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead.
+ checkpoint.load_state_dict(
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor.
+ device = getattr(value, "device", None)
[both FutureWarnings repeat verbatim, interleaved, once per rank during the distributed checkpoint load; the remaining copies are omitted]
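Megatron's checkpoint strategy still calls the deprecated torch.distributed.checkpoint load_state_dict entry point. A minimal sketch of the replacement the warning names; the module and checkpoint path here are stand-ins, not the job's real objects:

    import torch
    import torch.distributed.checkpoint as dcp

    model = torch.nn.Linear(8, 8)          # stand-in for the real model
    state_dict = {"model": model.state_dict()}
    # dcp.load populates state_dict in place from the checkpoint directory,
    # replacing the deprecated checkpoint.load_state_dict(...) call.
    dcp.load(state_dict, checkpoint_id="gpt-checkpoint")  # hypothetical path
    model.load_state_dict(state_dict["model"])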
+ checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. 
+ device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. 
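The torch.py:915 warning is asking callers to move from torch.distributed.checkpoint.load_state_dict to torch.distributed.checkpoint.load. A minimal single-process sketch of that migration, assuming a checkpoint directory written by the same library (the path and the pre-allocated tensor below are illustrative, not taken from this run):

    import torch
    import torch.distributed.checkpoint as dcp

    # dcp loads *in place*: keys and shapes in this dict must match the
    # checkpoint's metadata, which is exactly the check that fails below.
    state_dict = {"embedding.position_embeddings.weight": torch.empty(4096, 4096)}
    reader = dcp.FileSystemReader("/path/to/checkpoint_dir")  # illustrative path

    # Deprecated spelling (what strategies/torch.py:915 still calls):
    #   dcp.load_state_dict(state_dict=state_dict, storage_reader=reader)
    # Replacement the FutureWarning asks for:
    dcp.load(state_dict, storage_reader=reader)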
+ device = getattr(value, "device", None)
+[rank25]: Traceback (most recent call last):
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank25]:     pretrain(
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank25]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank25]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank25]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank25]:     return _load_global_dist_base_checkpoint(
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank25]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank25]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank25]:     checkpoint.load_state_dict(
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank25]:     return arg(*args, **kwargs)
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank25]:     return _load_state_dict(
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank25]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank25]:     raise result
+[rank25]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+[rank25]: Traceback (most recent call last): (RANK 0)
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank25]:     local_data = map_fun()
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank25]:     result = func(*args, **kwargs)
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank25]:     local_plan = planner.create_local_plan()
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank25]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank25]:     raise CheckpointingException(_msg)
+[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
+[... the inner traceback repeats verbatim for (RANK 1) through (RANK 9), and ranks 5, 13, and 17 print the same CheckpointException traceback interleaved with rank 25's; duplicates omitted ...]
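The check that raises here is small: before any tensor data is read, the load planner compares each tensor's global shape recorded in the checkpoint metadata against the global shape the resuming model declares for the same key. A hypothetical distilled version of that comparison (names are illustrative, not Megatron's actual internals):

    class CheckpointingException(Exception):
        pass

    def validate_global_shape(key, loaded, expected):
        # 'loaded' comes from the checkpoint metadata on disk; 'expected'
        # comes from the sharded state dict the resuming model built.
        if tuple(loaded) != tuple(expected):
            raise CheckpointingException(
                f"Global shape mismatch for loaded ({loaded}) and "
                f"expected ({expected}) tensor for key {key}"
            )

    validate_global_shape("embedding.position_embeddings.weight",
                          (4096, 4096), (12288, 4096))  # raises, as in this log

Because the local plans are then combined across the process group (the reduce_scatter frame above), a failure on any rank is re-raised on every rank, which is why all 32 ranks report the same exception with per-rank inner tracebacks.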
+[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) 
+[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 10) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 9) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 9) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: Traceback (most recent call last): (RANK 9) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() 
+[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 10) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 11) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: Traceback (most recent call last): (RANK 10) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: Traceback (most recent call last): (RANK 10) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 12) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 11) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 11) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: Traceback (most recent call last): (RANK 11) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 13) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 12) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: Traceback (most recent call last): (RANK 12) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: Traceback (most recent call last): (RANK 12) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 13) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 13) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 13) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank25]: Traceback (most recent call last): (RANK 14) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 15) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 14) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank17]: Traceback (most recent call last): (RANK 14) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: Traceback (most recent call last): (RANK 14) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 15) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 15) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 15) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank25]: Traceback (most recent call last): (RANK 16) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 16) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank25]: Traceback (most recent call last): (RANK 17)
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank25]:     local_data = map_fun()
+[rank25]:                  ^^^^^^^^^
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank25]:     result = func(*args, **kwargs)
+[rank25]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank25]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank25]:     local_plan = planner.create_local_plan()
+[rank25]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank25]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank25]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank25]:     raise CheckpointingException(_msg)
+[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
+[ranks 5, 13, 17, 25]: the identical traceback and CheckpointingException appear once per virtual rank, (RANK 16) through (RANK 29), interleaved across these four ranks' stderr streams.
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 30) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: Traceback (most recent call last): (RANK 29) +[rank13]: Traceback (most recent call last): (RANK 29) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: 
Traceback (most recent call last): (RANK 29) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 31) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 30) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 30) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 30) +[rank5]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +[rank28]: Traceback (most recent call last): 
+[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank28]: pretrain( +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank28]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 31) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 31) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 31) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank28]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank28]: 
^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank28]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank28]: return _load_global_dist_base_checkpoint( +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +[rank4]: Traceback (most recent call last): +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank4]: pretrain( +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank28]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank28]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank28]: checkpoint.load_state_dict( +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +[rank18]: Traceback (most recent call last): +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank18]: pretrain( +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank18]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( 
+[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +[rank12]: Traceback (most recent call last): +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank12]: pretrain( +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank12]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank4]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank28]: return arg(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank28]: return _load_state_dict( +[rank28]: ^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank28]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank18]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank18]: ^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank18]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank18]: return _load_global_dist_base_checkpoint( +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank12]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank12]: ^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank12]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank12]: return _load_global_dist_base_checkpoint( +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank4]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank4]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank4]: return _load_global_dist_base_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank28]: raise result +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank18]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank18]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank18]: checkpoint.load_state_dict( +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank12]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank12]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank12]: checkpoint.load_state_dict( +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank4]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank4]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank4]: 
checkpoint.load_state_dict( +[rank28]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank28]: Traceback (most recent call last): (RANK 0) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank18]: return arg(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank18]: return _load_state_dict( +[rank18]: ^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank18]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank12]: return arg(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank12]: return _load_state_dict( +[rank12]: ^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank12]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank4]: return arg(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank4]: return _load_state_dict( +[rank4]: ^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank4]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 1) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank18]: raise result +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank12]: raise result +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank4]: raise result +[rank4]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank4]: Traceback (most recent call last): (RANK 0) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank18]: Traceback (most recent call last): (RANK 0) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: 
+[rank28]: Traceback (most recent call last): (RANK 0)
+[rank28]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank28]:     local_data = map_fun()
+[rank28]:                  ^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank28]:     result = func(*args, **kwargs)
+[rank28]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank28]:     local_plan = planner.create_local_plan()
+[rank28]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank28]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank28]:     raise CheckpointingException(_msg)
+[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
[... the identical sub-traceback repeats for (RANK 1) onward, interleaved across ranks 28, 4, 12, and 18; rank 31's top-level traceback also begins at pretrain_gpt_profile.py line 554 but is truncated in the log where it fuses into a rank28 frame ...]
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 4) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", lin[rank31]: Traceback (most recent call last): +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank31]: pretrain( +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 4) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank31]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank31]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank31]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: Traceback (most recent call last): (RANK 5) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: Traceback (most recent call last): (RANK 5) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 5) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank31]: return _load_global_dist_base_checkpoint( +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank31]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank31]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", lin[rank9]: Traceback (most recent call last): +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank9]: pretrain( +[rank4]: ^^^^^^^^^ +[rank4]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank31]: checkpoint.load_state_dict( +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank31]: return arg(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank31]: return _load_state_dict( +[rank31]: ^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank31]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 6) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank9]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank9]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank9]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: raise 
CheckpointingException(_msg) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank9]: return _load_global_dist_base_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank9]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank9]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 6) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank31]: raise result +[rank31]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank31]: Traceback (most recent call last): (RANK 0) +[rank31]: File 
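Every rank above fails for the same reason, before any tensor data is read: during load-plan creation, the global shape recorded for embedding.position_embeddings.weight in the checkpoint metadata ([4096, 4096]) does not match the shape the resuming model allocates ([12288, 4096]). The following is a minimal sketch of that validation step, not Megatron-LM's actual implementation (the real check is _validate_global_shapes in megatron/core/dist_checkpointing/strategies/torch.py); the helper name below is invented for illustration.

    import torch

    class CheckpointingException(Exception):
        """Stand-in for megatron.core.dist_checkpointing.core.CheckpointingException."""

    def validate_global_shape(key, loaded, expected):
        # Compare the global shape recorded in checkpoint metadata against the
        # shape the resuming model allocates for the same state-dict key.
        if tuple(loaded) != tuple(expected):
            raise CheckpointingException(
                f"Global shape mismatch for loaded ({loaded}) and expected "
                f"({expected}) tensor for key {key}"
            )

    # Reproduces the failure mode in this log: 4096 saved rows vs 12288 expected.
    try:
        validate_global_shape(
            "embedding.position_embeddings.weight",
            torch.Size([4096, 4096]),   # global shape stored at save time
            (12288, 4096),              # global shape the resuming run expects
        )
    except CheckpointingException as exc:
        print(exc)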
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: re[rank26]: Traceback (most recent call last): +[rank18]: Traceback (most recent call last): (RANK 7) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank9]: checkpoint.load_state_dict( +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank9]: return arg(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank9]: return _load_state_dict( +[rank9]: ^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank9]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 7) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank26]: pretrain( +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain 
+[rank26]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank26]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank26]: ^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank26]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank26]: return _load_global_dist_base_checkpoint( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank26]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank26]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 8) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: raise result +[rank9]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank9]: Traceback (most recent call last): (RANK 0) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: [rank8]: Traceback (most recent call last): +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank8]: pretrain( +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 8) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank26]: checkpoint.load_state_dict( +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank26]: return arg(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank26]: return _load_state_dict( +[rank26]: ^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank8]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank8]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank8]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: local_plan = planner.create_local_plan() +[rank26]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: Traceback (most recent call last): (RANK 9) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank8]: return _load_global_dist_base_checkpoint( +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank8]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank8]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 9) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank26]: raise result +[rank26]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank26]: Traceback (most recent call last): (RANK 0) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: re[rank30]: Traceback (most recent call last): +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 10) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank8]: checkpoint.load_state_dict( +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank8]: return arg(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank8]: return _load_state_dict( +[rank8]: ^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank8]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, 
global_step) +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank30]: pretrain( +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank30]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank30]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank30]: ^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank30]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 10) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank30]: return _load_global_dist_base_checkpoint( +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank30]: state_dict = 
dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank30]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096))[rank19]: Traceback (most recent call last): +[rank8]: raise result +[rank8]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank8]: Traceback (most recent call last): (RANK 0) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: [rank10]: Traceback (most recent call last): +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank10]: pretrain( +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 11) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1[rank0]: Traceback (most recent call last): +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank30]: checkpoint.load_state_dict( +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank30]: return arg(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank30]: return _load_state_dict( +[rank30]: ^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank19]: pretrain( +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank19]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank19]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank19]: ^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank19]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank10]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank10]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank10]: ^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank10]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank30]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank19]: return _load_global_dist_base_checkpoint( +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank19]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank19]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint 
+[rank10]: return _load_global_dist_base_checkpoint( +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank10]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank10]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: pretrain( +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank0]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank0]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank0]: ^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank0]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank30]: raise result +[rank30]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank30]: Traceback (most recent call last): (RANK 0) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: re[rank24]: Traceback (most recent call last): +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank19]: checkpoint.load_state_dict( +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank19]: return arg(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank19]: return _load_state_dict( +[rank19]: ^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in 
load +[rank10]: checkpoint.load_state_dict( +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank10]: return arg(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank10]: return _load_state_dict( +[rank10]: ^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank10]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank0]: return _load_global_dist_base_checkpoint( +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank0]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank0]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank24]: pretrain( +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank24]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank24]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank24]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank19]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank0]: checkpoint.load_state_dict( +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank0]: return arg(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank0]: return _load_state_dict( +[rank0]: ^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 
234, in _load_state_dict +[rank0]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank24]: return _load_global_dist_base_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank24]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank24]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank19]: raise result +[rank19]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank19]: Traceback (most recent call last): (RANK 0) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank10]: raise result +[rank10]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank10]: Traceback (most recent call last): (RANK 0) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: re[rank14]: Traceback (most recent call last): +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank0]: raise result +[rank0]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank0]: Traceback (most recent call last): (RANK 0) +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank24]: checkpoint.load_state_dict( +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank24]: return arg(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank24]: return _load_state_dict( +[rank24]: ^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank19]: re[rank16]: Traceback (most recent call last): +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank16]: pretrain( +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank16]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank16]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank14]: pretrain( +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank14]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank14]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank14]: ^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank14]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank0]: result = func(*args, **kwargs) +[rank0]: [rank6]: Traceback (most recent call last): +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank24]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, 
in _load_base_checkpoint +[rank16]: return _load_global_dist_base_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank16]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank14]: return _load_global_dist_base_checkpoint( +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank14]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank14]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank6]: pretrain( +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank6]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank6]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank6]: ^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank6]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank24]: raise result +[rank24]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank24]: Traceback (most recent call last): (RANK 0) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: ree 605, in create_local_plan +[rank16]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load 
+[rank16]: checkpoint.load_state_dict( +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank16]: return arg(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank16]: return _load_state_dict( +[rank16]: ^^^^^^^^^^^^^^^^^ +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank14]: checkpoint.load_state_dict( +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank14]: return arg(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank14]: return _load_state_dict( +[rank14]: ^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank6]: return _load_global_dist_base_checkpoint( +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank6]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank6]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 6) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank16]: 
central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank14]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank6]: checkpoint.load_state_dict( +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank6]: return arg(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank6]: return _load_state_dict( +[rank6]: ^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank6]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank16]: raise result +[rank16]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank16]: Traceback (most recent call last): (RANK 0) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank14]: raise result +[rank14]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank14]: Traceback (most recent call last): (RANK 0) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: re ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank6]: raise result +[rank6]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank6]: Traceback (most recent call last): (RANK 0) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 7) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank16]: re[rank22]: Traceback (most recent call last): +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank22]: pretrain( +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank22]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank22]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 1) +[rank6]: result = func(*args, **kwargs) +[rank6]: [rank1]: Traceback (most recent call last): +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank22]: return _load_global_dist_base_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank22]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: pretrain( +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank1]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank1]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank1]: ^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank1]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 8) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in sult = func(*args, **kwargs) +[rank22]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank22]: checkpoint.load_state_dict( +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank22]: return arg(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank22]: return _load_state_dict( +[rank22]: ^^^^^^^^^^^^^^^^^ +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 2) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank1]: return _load_global_dist_base_checkpoint( +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank1]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank1]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank1]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank22]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank1]: checkpoint.load_state_dict( +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank1]: return arg(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank1]: return _load_state_dict( +[rank1]: ^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank1]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank26]: Traceback (most recent call last): (RANK 1) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank22]: raise result 
+[rank22]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank22]: Traceback (most recent call last): (RANK 0) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank1]: raise result +[rank1]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank1]: Traceback (most recent call last): (RANK 0) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 2) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: re[rank23]: Traceback 
(most recent call last): +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank23]: pretrain( +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank23]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank23]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank23]: ^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank9]: Traceback (most recent call last): (RANK 3)e 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 6) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank1]: result = func(*args, **kwargs) +[rank1]: 2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank23]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank23]: return _load_global_dist_base_checkpoint( +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank23]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, 
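Every rank fails the same check while building the distributed-checkpoint load plan: the global (unsharded) shape recorded in the checkpoint metadata for embedding.position_embeddings.weight is (4096, 4096), while the sharded state dict built from the current run's config expects (12288, 4096). The following is a minimal, illustrative sketch of that kind of shape check, not Megatron's actual _validate_global_shapes implementation:

# Illustrative sketch of the failing check; names are simplified.
class CheckpointingException(Exception):
    pass

def validate_global_shape(key: str, loaded, expected) -> None:
    # Compare the global shape stored in checkpoint metadata against
    # the shape the current model config expects for the same key.
    if tuple(loaded) != tuple(expected):
        raise CheckpointingException(
            f"Global shape mismatch for loaded ({tuple(loaded)}) "
            f"and expected ({tuple(expected)}) tensor for key {key}"
        )

try:
    validate_global_shape(
        "embedding.position_embeddings.weight",
        (4096, 4096),    # shape recorded in the checkpoint
        (12288, 4096),   # shape the relaunched job expects
    )
except CheckpointingException as e:
    print(e)  # mirrors the per-rank message in the traceback above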
+[rank18]: Traceback (most recent call last): (RANK 12)
+[rank18]: File
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank11]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank11]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank11]: checkpoint.load_state_dict( +[rank7]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank7]: return _load_global_dist_base_checkpoint( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank7]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank11]: return arg(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank11]: return _load_state_dict( +[rank11]: ^^^^^^^^^^^^^^^^^ +[rank7]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank7]: checkpoint.load_state_dict( +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank7]: return arg(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank7]: return _load_state_dict( +[rank7]: ^^^^^^^^^^^^^^^^^ +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 1) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 13) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank11]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank11]: raise result +[rank11]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank11]: Traceback (most recent call last): (RANK 0) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank7]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank7]: raise result +[rank7]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank7]: Traceback (most recent call last): (RANK 0) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 2) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointisult = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: re ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: ^^^^^^^^^ +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 1) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 1) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 2) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 2) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 1) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding[rank27]: Traceback (most recent call last): +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank27]: pretrain( +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank27]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank27]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank27]: ^^^^^^^^^^^^^^^^ +[rank19]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 2) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank27]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank27]: return _load_global_dist_base_checkpoint( +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank27]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank27]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank27]: checkpoint.load_state_dict( +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank27]: return arg(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank27]: return _load_state_dict( +[rank27]: ^^^^^^^^^^^^^^^^^ +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 3)sult = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: raise CheckpointingException(_msg) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank27]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank27]: raise result +[rank27]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 
13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank27]: Traceback (most recent call last): (RANK 0) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 1) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 1) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 3)[rank2]: Traceback (most recent call last): +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank2]: pretrain( +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank2]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank2]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: rewrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File 
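+The failure is raised while building the load plan, before any tensor data is read: the planner compares each tensor's expected global shape against the shape recorded in the checkpoint metadata and aborts on the first mismatch. For embedding.position_embeddings.weight that shape is (max_position_embeddings, hidden_size); the checkpoint stores (4096, 4096) while the resuming model expects (12288, 4096). Since 12288 = 3 * 4096, a plausible reading (an inference from the shapes, not something the log states) is that the job was relaunched with a 3x larger sequence length / max-position-embeddings than the run that wrote the checkpoint. Below is a hedged paraphrase of the check the _validate_global_shapes frames imply; the names are stand-ins, not the actual Megatron source:
+
+    # shape_check_sketch.py -- stand-in for the validation implied by the
+    # traceback above; function and exception names are illustrative only
+    import torch
+
+    def validate_global_shape(key: str, loaded: torch.Size, expected: tuple) -> None:
+        # Fail fast when the checkpoint's recorded global shape disagrees
+        # with the shape the current model expects for the same key.
+        if tuple(loaded) != tuple(expected):
+            raise ValueError(
+                f"Global shape mismatch for loaded ({loaded}) and "
+                f"expected ({expected}) tensor for key {key}"
+            )
+
+    try:
+        validate_global_shape(
+            "embedding.position_embeddings.weight",
+            torch.Size([4096, 4096]),  # recorded in the checkpoint
+            (12288, 4096),             # expected by the resuming model
+        )
+    except ValueError as exc:
+        print(exc)  # reproduces the shape of the error message above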
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 2) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: ^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank2]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank2]: return _load_global_dist_base_checkpoint( +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank2]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 9) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: ^^^^^^^^^ +[rank10]: Traceback (most recent call last): (RANK 2) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank2]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank2]: checkpoint.load_state_dict( +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank2]: return arg(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^ +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank2]: return _load_state_dict( +[rank2]: ^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank2]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank2]: raise result +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 10) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank2]: Traceback (most recent call last): (RANK 0) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 1) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 1) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)).position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 3) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 2) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank1]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 1) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: ^^^^^^^^^ +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 2) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 2) +[rank26]: Traceback (most recent call last): (RANK 4) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 5) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embeddingsult = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embeddingwrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", linsult = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 1) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 9) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 3)[rank3]: Traceback (most recent call last): +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank3]: pretrain( +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank3]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank3]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes 
+[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 1) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 2) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank3]: ^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank3]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank3]: return _load_global_dist_base_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank3]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank21]: ^^^^^^^^^ +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank3]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank3]: checkpoint.load_state_dict( +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank3]: return arg(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^ +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank12]: Traceback (most recent call last): (RANK 10) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank3]: return _load_state_dict( +[rank3]: ^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank3]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank3]: raise result +[rank29]: Traceback (most recent call last): (RANK 2) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 3) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for 
+[rank3]: Traceback (most recent call last): (RANK 0)
+[rank3]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank3]:     local_data = map_fun()
+[rank3]:                  ^^^^^^^^^
+[rank3]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank3]:     result = func(*args, **kwargs)
+[rank3]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank3]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank3]:     local_plan = planner.create_local_plan()
+[rank3]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank3]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank3]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank3]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank3]:     raise CheckpointingException(_msg)
+[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
+[... the aggregated exception repeats this identical sub-traceback for RANK 1 through RANK 31, and ranks 0, 4, 9, 10, 11, 16, 18, 19, 21, 26, 28, and 30 interleave the same output; duplicates elided ...]
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 5) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", linng/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensosult = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 14) +[rank28]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 15) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 1) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tenso ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 1) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: Traceback (most recent call last): (RANK 15) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) 
+[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 2) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: Traceback (most recent call last): (RANK 16) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chec[rank20]: Traceback (most recent call last): +[rank20]: File 
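What every rank trips over is the global-shape validation run while each rank builds its local load plan: the shape recorded for a tensor in the checkpoint metadata must equal the global shape the current model declares for that key. A minimal sketch of that check in spirit, with hypothetical names (validate_global_shape below is an illustration, not the actual helper in megatron/core/dist_checkpointing/strategies/torch.py):

import torch

class CheckpointingException(Exception):
    """Stand-in for megatron.core.dist_checkpointing.core.CheckpointingException."""

def validate_global_shape(key: str, loaded: torch.Size, expected: tuple) -> None:
    # The failing check in spirit: the global shape stored in the checkpoint
    # metadata vs. the global shape the current model expects for this key.
    if tuple(loaded) != tuple(expected):
        raise CheckpointingException(
            f"Global shape mismatch for loaded ({loaded}) and expected "
            f"({expected}) tensor for key {key}"
        )

# Reproduces the message above: the checkpoint holds a 4096 x 4096 table,
# while the current run expects 12288 x 4096.
try:
    validate_global_shape(
        "embedding.position_embeddings.weight",
        torch.Size([4096, 4096]),
        (12288, 4096),
    )
except CheckpointingException as exc:
    print(exc)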
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank20]: pretrain( +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank20]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 2) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 16) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checsult = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank20]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank20]: ^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank20]: state_dict, checkpoint_name, release, ckpt_type 
= _load_base_checkpoint( +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank20]: return _load_global_dist_base_checkpoint( +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank20]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank20]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank20]: checkpoint.load_state_dict( +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 4) +[rank8]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 3) ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: Traceback (most recent call last): (RANK 1) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank20]: return arg(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^ +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank20]: return _load_state_dict( +[rank20]: ^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank20]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank20]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank20]: raise result +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 5) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 1) +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 2) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank20]: Traceback (most recent call last): (RANK 0) +[rank20]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: re.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 3) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tenso.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 3) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embeddinge 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ 
[the per-rank (RANK 0) through (RANK 18) sub-tracebacks that follow all repeat the same reduce_scatter -> local_step -> create_local_plan -> _validate_global_shapes CheckpointingException shown above, interleaved across ranks 0-31, and are condensed here]
+[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointi.position_embeddings.weight +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: Traceback (most recent call last): (RANK 3) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 18) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = 
map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096e 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 4) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: Traceback (most recent call last): (RANK 9) +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 6) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 19) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[ran +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 7) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 10) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: ^^^^^^^^^ +[rank11]: Traceback (most recent call last): (RANK 5) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 4) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)).position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 3) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise 
CheckpointingException(_msg) +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 6) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 8) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in e 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 5) +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 4) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 6) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 7) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank10]: Traceback (most recent call last): (RANK 7) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 6) +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: ^^^^^^^^^ +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 8) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank24]: Traceback (most recent call last): (RANK 5) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in ng/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 14) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 7) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", lin])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 19) +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 8) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in ])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 19) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback 
(most recent call last): (RANK 20) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: raise CheckpointingException(_msg) +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 8) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 20) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 15) +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/j +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 16) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank3]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 4) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 21) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 21) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chec.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 3) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper 
+[ranks 0, 1, 3, 4, 8, 9, 14, 15, 16, 18, 19, 20, 23, 26, 27, 28, 30]: every rank fails with the same CheckpointingException while planning the distributed checkpoint load, and each rank additionally echoes the aggregated per-rank copies of this traceback (tagged "(RANK n)" by the reduce_scatter error collection); the common traceback is:
+[rank*]: Traceback (most recent call last):
+[rank*]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank*]:     local_data = map_fun()
+[rank*]:                  ^^^^^^^^^
+[rank*]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank*]:     result = func(*args, **kwargs)
+[rank*]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank*]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank*]:     local_plan = planner.create_local_plan()
+[rank*]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank*]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank*]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank*]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank*]:     raise CheckpointingException(_msg)
+[rank*]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: Traceback (most recent call last): (RANK 1) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 10) +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 7) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) 
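+Every rank fails with the same exception: the checkpoint stores embedding.position_embeddings.weight with global shape [4096, 4096], while the resuming configuration expects [12288, 4096]. Since learned absolute position embeddings are shaped [max-sequence-length, hidden-size], this is consistent with the checkpoint having been saved for a 4096-token context and the job relaunched at 12288. The self-contained sketch below reproduces the shape check that raises here; the names and signatures (validate_global_shapes, CheckpointingException) are illustrative stand-ins, not Megatron's actual implementation in dist_checkpointing/strategies/torch.py.
+
+    import torch
+
+    class CheckpointingException(Exception):
+        """Stand-in for megatron.core.dist_checkpointing.core.CheckpointingException."""
+
+    def validate_global_shapes(loaded_metadata, expected_shapes):
+        """Raise if a tensor's global shape recorded in the checkpoint metadata
+        differs from the shape the current model configuration expects."""
+        for key, expected in expected_shapes.items():
+            loaded = tuple(loaded_metadata[key])
+            if loaded != tuple(expected):
+                raise CheckpointingException(
+                    f"Global shape mismatch for loaded ({torch.Size(loaded)}) "
+                    f"and expected ({tuple(expected)}) tensor for key {key}"
+                )
+
+    # The situation in this log: the checkpoint holds position embeddings for a
+    # 4096-token context, while the resuming job was configured for 12288 tokens.
+    loaded_metadata = {"embedding.position_embeddings.weight": (4096, 4096)}
+    expected_shapes = {"embedding.position_embeddings.weight": (12288, 4096)}
+
+    try:
+        validate_global_shapes(loaded_metadata, expected_shapes)
+    except CheckpointingException as exc:
+        print(exc)  # Global shape mismatch for loaded (torch.Size([4096, 4096])) ...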
+[rank30]: Traceback (most recent call last): (RANK 14)
+[rank30]: File
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 6) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 12) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: 
local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: Traceback (most recent call last): (RANK 28) +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 15) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 13) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 29) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: Traceback (most recent call last): (RANK 7) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_cunda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 16) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/ha tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 11) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 8) +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 9) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 17) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 11) +[rank14]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: Traceback (most recent call last): (RANK 12) +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 12) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 10) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 18) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 13) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 11) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1se CheckpointingException(_msg) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096 tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 11) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank19]: result = func(*args, **kwargs) 
+[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointie 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 13) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 22) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 6) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointie 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 6) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 23) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 12) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^ +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 13) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 7) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointi-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank15]: Traceback (most recent call last): (RANK 7) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 24) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 8) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 8) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[r2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: Traceback (most recent call last): (RANK 25) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 30) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in heckpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 14) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, 
**kwargs) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 12) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 13) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 26) 
+[... the identical "Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight" traceback repeats, interleaved, for the remaining sub-traces of rank 0 and for ranks 1, 2, 6, 7, 8, 14, 15, 18, 19, 21, 23, 24, 26, 28, and 31 (aggregated sub-traces labeled (RANK 1) through (RANK 31)); duplicates elided ...]
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 16) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 14) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096 tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 11) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank14]: Traceback (most recent call last): (RANK 18) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_load2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 40962/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 12) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 12) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank27]: Traceback (most recent call last): (RANK 15) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 12) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 16) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checwrapper +[rank24]: result = func(*args, **kwargs) +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 13) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 13) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointie 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 13) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_cank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank24]: Traceback (most recent call last): (RANK 9) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 6) +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_c tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 11) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 25) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan 
+[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 10) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 7) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 26) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank15]: Traceback (most recent call last): (RANK 12) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, 
in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096))-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 8) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in ])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 19) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 13) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank4]: Traceback (most recent call last): (RANK 27) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnheckpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 14) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointi])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 19) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 25) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 26) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 20) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, 
in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 15) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 21) +[rank19]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank14]: Traceback (most recent call last): (RANK 20) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 16) +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fokpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: 
local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatrong/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 21) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_load2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: raise CheckpointingException(_msg) +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 14) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatroheckpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 14) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 17) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank21]: local_plan = planner.create_local_plan() +[rank9]: local_data = map_fun() +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 12) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 18) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 15) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 15) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 13) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 
87, in wrapper +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096 tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 11) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 16) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checwrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 16) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_cunda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: local_plan = planner.create_local_plan() +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 9) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loadng/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 9) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise 
CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 12) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank15]: Traceback (most recent call last): (RANK 14) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 13) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 15) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: Traceback (most recent call last): (RANK 10) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointir loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 27) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 11) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1unda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) 
tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 10) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 9) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank28]: Traceback (most recent call last): (RANK 28) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096))n/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 29) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 22) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 16) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checn/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/har loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 27) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: raise CheckpointingException(_msg) +[rank6]: Traceback (most recent call last): (RANK 10) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 23) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 22) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 11) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.1er.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 23) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 17) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: Traceback (most recent call last): (RANK 28) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 24) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/sitekpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 29) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 24) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/sitekpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 18) +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hang/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors)
+[rank0]: Traceback (most recent call last): (RANK 17)
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank0]:     local_data = map_fun()
+[rank0]:                  ^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank0]:     result = func(*args, **kwargs)
+[rank0]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank0]:     local_plan = planner.create_local_plan()
+[rank0]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank0]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank0]:     raise CheckpointingException(_msg)
+[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
+[... identical CheckpointingException tracebacks repeat, interleaved, for ranks 1-4, 7, 8, 12, 14, 15, 19, 21, 23, 24, 26, 27, 29, and 31; only the [rankN] prefix and the collected (RANK 3) through (RANK 31) sub-traceback markers differ ...]
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checr loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 27) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: Traceback (most recent call last): (RANK 21) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatrowrapper +[rank11]: result = func(*args, **kwargs) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 12) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", 
line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: Traceback (most recent call last): (RANK 6) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 13) +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 7) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 28) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 9) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank19]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_c2/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 8) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in kpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 29) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 10) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hawrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = 
planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096))wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 12) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 17) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 9) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 13) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 10) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 9) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 18) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_ck1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096e 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096))n/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 22) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 10) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 6) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096))er.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise 
CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 20) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 7) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 23) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 17) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 21) +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 8) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors)
+[rank29]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank29]:     raise CheckpointingException(_msg)
+[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
+[rank29]: Traceback (most recent call last): (RANK 9)
+[rank29]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank29]:     local_data = map_fun()
+[rank29]:                  ^^^^^^^^^
+[rank29]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank29]:     result = func(*args, **kwargs)
+[rank29]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank29]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank29]:     local_plan = planner.create_local_plan()
+[rank29]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank29]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank29]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank29]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank29]:     raise CheckpointingException(_msg)
+[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
+[... the same traceback repeats, interleaved, for every rank in this segment (rank0 through rank30, with nested sub-traces labeled RANK 6 through RANK 31), each raising the identical CheckpointingException for key embedding.position_embeddings.weight; duplicate copies omitted ...]
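For reference, the check that fails above compares each tensor's global shape recorded in the checkpoint metadata against the shape the resuming model declares. A minimal sketch of that comparison follows (illustrative only, assuming nothing beyond the error text itself; it is not Megatron's actual _validate_global_shapes implementation, and validate_global_shape is a hypothetical helper name):

import torch

class CheckpointingException(Exception):
    pass

def validate_global_shape(loaded_shape, expected_shape, key):
    # Reject the load plan if the shape saved in the checkpoint metadata
    # differs from the shape the current model expects for this key.
    if tuple(loaded_shape) != tuple(expected_shape):
        raise CheckpointingException(
            f"Global shape mismatch for loaded ({loaded_shape}) and "
            f"expected ({expected_shape}) tensor for key {key}"
        )

# The failing case from this run: the checkpoint holds a 4096-row position
# embedding table, while the resuming job declares 12288 rows (hidden size
# 4096 in both).
try:
    validate_global_shape(torch.Size([4096, 4096]), (12288, 4096),
                          "embedding.position_embeddings.weight")
except CheckpointingException as exc:
    print(exc)  # reproduces the message emitted by every rank above

A mismatch of this pattern (4096 saved rows versus 12288 expected rows of embedding.position_embeddings.weight) typically means the job was relaunched with a larger maximum sequence length than the checkpoint was written with; a learned absolute position-embedding table cannot be resharded to a new row count, so the load plan is rejected on every rank before any tensor data is read.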
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 24) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 26) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank22]: Traceback (most recent call last): (RANK 12) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", 
line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 15) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 13) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 11) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 16) +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fowrapper +[rank31]: result = func(*args, **kwargs) +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointi-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 12) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loadheckpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank6]: Traceback (most recent call last): (RANK 14) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = 
planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: Traceback (most recent call last): (RANK 9) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 25) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 13) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 10) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointir loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 27) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 15) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
+[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 26) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 16) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 
4096))ng/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 14) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fong/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank14]: Traceback (most recent call last): (RANK 28) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loadse CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected 
((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 22) +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 29) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 15) +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: Traceback (most recent call last): (RANK 14) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 16) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 15) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() 
+[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hak9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 23) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: local_data = map_fun() +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call 
last): (RANK 20) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 24) +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chec])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 19) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 16) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chec])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 19) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rk3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 20) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 21) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 20) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 21) +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 20) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: rain/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: 
+[rank23]: Traceback (most recent call last): (RANK 21)
+[rank23]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank23]:     local_data = map_fun()
+[rank23]:                  ^^^^^^^^^
+[rank23]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank23]:     result = func(*args, **kwargs)
+[rank23]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank23]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank23]:     local_plan = planner.create_local_plan()
+[rank23]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank23]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank23]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank23]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank23]:     raise CheckpointingException(_msg)
+[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
+[... the identical traceback and CheckpointingException repeat verbatim for RANK 9 through RANK 29 and are emitted interleaved by every process rank in the job (ranks 0-31) ...]
raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 22) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 26) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank31]: Traceback (most recent call last): (RANK 16) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chec])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 19) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = 
planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 16) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 27) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank23]: Traceback (most recent call last): (RANK 23) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: 
local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checo.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 30) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnse CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 22) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 20) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 24) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site tensor for key embedding.position_embeddings.weight +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 21) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank20]: Traceback (most recent call last): (RANK 11) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 31) +[rank14]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 23) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatro])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 19) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 12) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +r loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 27) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 24) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: 
^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 25) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 20) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 13) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 28) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 21) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointikpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 29) +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 26) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatroo.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank22]: Traceback (most recent call last): (RANK 17) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/ha-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 27) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 30) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 18) +[rank22]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnse CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 22) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 31) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank15]: Traceback (most recent call last): (RANK 25) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 30) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 26) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 23) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: raise CheckpointingException(_msg) +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +o.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 30) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 31) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fot/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 24) +[rank7]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rk2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 
87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 31) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 19) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 28) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank16]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 29) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 20) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 20) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 21) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank8]: result = 
func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.Checkpoino.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: Traceback (most recent call last): (RANK 25) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 21) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 30) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raik6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 26) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatro-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = 
planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fokpoint/logger.py", line 87, in wrapper +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 31) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 20) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: result = func(*args, **kwargs) +[rank23]: Traceback (most recent call last): (RANK 25) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 26) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +r loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 27) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 21) +[rank31]: Traceback (most recent call last): (RANK 17) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() 
+[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch fong/strategies/torch.py", line 605, in create_local_plan +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: 
Traceback (most recent call last): (RANK 18) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 14) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 28) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: rait/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", 
line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 29) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096n/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 22) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 15) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 28) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 
16) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hakpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 29) +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 23) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 17) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 24) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/chec])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 19) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, 
in wrapper
+[rank10]: result = func(*args, **kwargs)
+[rank10]: ^^^^^^^^^^^^^^^^^^^^^
+[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank10]: local_plan = planner.create_local_plan()
+[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank10]: raise CheckpointingException(_msg)
+[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
+
+[ranks 0-30]: the same traceback repeats, interleaved, on every rank that appears in this segment (ranks 0, 1, 2, 3, 6, 7, 8, 10, 11, 15, 16, 20, 22, 23, 24, 29, and 30, with per-rank traceback counters (RANK 17) through (RANK 31)): each pass through reduce_scatter -> wrapper -> local_step -> create_local_plan -> _validate_global_shapes ends in the identical CheckpointingException for key embedding.position_embeddings.weight.
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 20) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 23) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 31) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: raise CheckpointingException(_msg) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 21) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 24) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 22) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 29) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 27) +[rank6]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mntingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 23) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 27) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hase CheckpointingException(_msg) +[rank3]: Traceback (most recent call last): (RANK 30) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 
87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 22) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 31) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 24) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/siter loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 27) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 28) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 23) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank30]: local_data = map_fun() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 29) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +tingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 30) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ 
+[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: ^^^^^^^^^ +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 24) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 31) +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 28) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/har loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 27) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank9]: local_plan = planner.create_local_plan() +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 29) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[ro.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 30) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/ha-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 28) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 25) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 29) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 31) +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +ank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 25) +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: ^^^^^^^^^ +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight + +-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 26) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 26) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 
192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch foo.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 30) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 25) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 31) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 27) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 30) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: 
+[rank0]: Traceback (most recent call last): (RANK 31)
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank0]:     local_data = map_fun()
+[rank0]:                  ^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank0]:     result = func(*args, **kwargs)
+[rank0]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank0]:     local_plan = planner.create_local_plan()
+[rank0]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank0]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank0]:     raise CheckpointingException(_msg)
+[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([4096, 4096])) and expected ((12288, 4096)) tensor for key embedding.position_embeddings.weight
[... the identical CheckpointingException traceback repeats for RANK 22 through RANK 31, interleaved across ranks 0, 2, 6, 9, 11, 16, 20, 22, 30, and 31; duplicates elided ...]
[rank0]:[W621 21:28:20.324187315 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
(all 32 ranks emitted the same resource-leak warning; the remaining 31 copies are omitted)
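The warning is advisory: per the linked shutdown documentation, each worker should tear the process group down explicitly before exiting. A minimal sketch of the pattern (run_training is a placeholder, not code from this job):

```python
import torch.distributed as dist

def run_training() -> None:
    pass  # placeholder for the actual training loop

def main() -> None:
    dist.init_process_group(backend="nccl")
    try:
        run_training()
    finally:
        # Explicit teardown; skipping this is what triggers the
        # ProcessGroupNCCL resource-leak warning logged above.
        dist.destroy_process_group()

if __name__ == "__main__":
    main()
```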
W0621 21:28:21.139000 164632 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 164704 closing signal SIGTERM
(matching SIGTERM lines follow for each surviving worker of agents 164632, 2083398, 3453495 and 3384577)
E0621 21:28:21.770000 164632 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 4 (pid: 164708) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
E0621 21:28:21.825000 2083398 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 2 (pid: 2083470) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
E0621 21:28:21.866000 3453495 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 1 (pid: 3453566) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
E0621 21:28:22.129000 3384577 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 0 (pid: 3384647) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
Each agent then raised the same launcher-side error:
Traceback (most recent call last):
  File "<frozen runpy>", line 198, in _run_module_as_main
  File "<frozen runpy>", line 88, in _run_code
  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
    main()
  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
    return arg(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^
  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
    launch(args)
  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
    run(args)
  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
    elastic_launch(
  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
    return launch_agent(self._config, self._entrypoint, list(args))
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
    raise ChildFailedError(
torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
============================================================
./pretrain_gpt_profile.py FAILED
------------------------------------------------------------
Failures:
[1]:
  time : 2025-06-21_21:28:21
  host : fs-mbz-gpu-852
  rank : 5 (local_rank: 5)
  exitcode : 1 (pid: 164709)
  error_file:
  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
------------------------------------------------------------
Root Cause (first observed failure):
[0]:
  time : 2025-06-21_21:28:21
  host : fs-mbz-gpu-852
  rank : 4 (local_rank: 4)
  exitcode : 1 (pid: 164708)
  error_file:
  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
============================================================
(the equivalent reports from the other three agents name their own first failures: host fs-mbz-gpu-901, rank 26, local_rank 2, pid 2083470; host fs-mbz-gpu-870, rank 9, local_rank 1, pid 3453566; host fs-mbz-gpu-881, rank 16, local_rank 0, pid 3384647)
W0621 21:28:21.875000 3453495 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3453495_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
[W621 21:28:21.794153176 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:60586, remote=[fs-mbz-gpu-852]:29500): Broken pipe
Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x14efd07785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
frame #1: <unknown function> + 0x5ba8afe (0x14efb9a5aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
frame #2: <unknown function> + 0x5baa358 (0x14efb9a5c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
frame #3: <unknown function> + 0x5babb3e (0x14efb9a5db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
frame #4: c10d::TCPStore::doWait(c10::ArrayRef<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::chrono::duration<long, std::ratio<1l, 1000l> >) + 0x1a6 (0x14efb9a57ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0x33 (0x14efb9a57ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
frame #6: c10d::TCPStore::get(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xab (0x14efb9a58f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
frame #7: <unknown function> + 0xc0f526 (0x14efc8d8b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
frame #8: <unknown function> + 0x37f17d (0x14efc84fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)

frame #26: <unknown function> + 0x29d90 (0x14efd1a7cd90 in /lib/x86_64-linux-gnu/libc.so.6)
frame #27: __libc_start_main + 0x80 (0x14efd1a7ce40 in /lib/x86_64-linux-gnu/libc.so.6)

(the same Broken pipe trace and RendezvousConnectionError warning were printed repeatedly by agent 3453495 on fs-mbz-gpu-870 and by agent 3384577 on fs-mbz-gpu-881)
+ set +x
+ set +x
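Every failure entry above has an empty error_file field and points at the elastic errors page. That page's suggested fix is to wrap the worker entrypoint with the record decorator so the child's traceback is captured in the report. A minimal sketch of what that would look like in ./pretrain_gpt_profile.py (the main body here is a placeholder):

```python
from torch.distributed.elastic.multiprocessing.errors import record

@record  # persists the worker's exception trace to the error_file shown in the report
def main() -> None:
    ...  # the script's actual entrypoint

if __name__ == "__main__":
    main()
```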
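The Broken pipe traces are a secondary symptom rather than a cause: the c10d rendezvous store lives on the rank-0 agent's node (fs-mbz-gpu-852:29500, per the --rdzv_endpoint below), so once that agent exits, store accesses from the surviving agents fail. An illustrative sketch of that dependency, with the hostname and port taken from this log (this is not code from the job):

```python
from datetime import timedelta
from torch.distributed import TCPStore

# Client-side handle to the rendezvous store hosted by the rank-0 agent.
# Once the agent on fs-mbz-gpu-852 has exited, this get() fails with a
# connection error ("Broken pipe"), matching the traces above.
store = TCPStore("fs-mbz-gpu-852", 29500, is_master=False,
                 timeout=timedelta(seconds=30))
store.get("rendezvous_key")  # raises after the master side is gone
```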
+ set +x
+ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
+ export PROF_CTX_LENGTH=16384
+ PROF_CTX_LENGTH=16384
+ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L16384*tp4.cp8.bs1.json'
+ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L16384*tp4.cp8.bs1.json' ']'
+ echo 'Running ctx_length=16384, TP_SIZE=4, CP_SIZE=8, BATCH_SIZE=1'
+ srun bash ./attnserver.sh
+ which python3
+ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 16384 --max-position-embeddings 16384 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
(the same `which python3` and launch command run on the other three nodes with --node_rank 1, 0 and 2)
/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
and will be removed in future. Use torchrun.
Note that --use-env is set by default in torchrun.
If your script expects `--local-rank` argument to be set, please
change it to read from `os.environ['LOCAL_RANK']` instead. See
https://pytorch.org/docs/stable/distributed.html#launch-utility for
further instructions

  main()
W0621 21:28:26.387000 166537 site-packages/torch/distributed/run.py:766]
W0621 21:28:26.387000 166537 site-packages/torch/distributed/run.py:766] *****************************************
W0621 21:28:26.387000 166537 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
W0621 21:28:26.387000 166537 site-packages/torch/distributed/run.py:766] *****************************************
(the same FutureWarning and OMP_NUM_THREADS banner are printed by agents 2085247, 3386426 and 3455328)
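The FutureWarning carries the standard migration advice: under torchrun (or torch.distributed.launch with --use-env, the default here), the launcher exports LOCAL_RANK into the environment instead of passing a --local-rank argument. A minimal sketch of the suggested change:

```python
import os

import torch

# torchrun exports LOCAL_RANK (along with RANK and WORLD_SIZE), so the
# script reads it from the environment rather than parsing --local-rank.
local_rank = int(os.environ["LOCAL_RANK"])
torch.cuda.set_device(local_rank)
```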
[rank0]:[W621 21:28:50.123529637 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 0] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
(each of the 32 ranks printed the same warning for its local GPU 0-7)
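The ProcessGroupNCCL warning suggests pinning the device at initialization time. Assuming a PyTorch recent enough to accept the device_id parameter it mentions (this build evidently does, since it recommends it), the change looks roughly like:

```python
import os

import torch
import torch.distributed as dist

# Binding the process group to the local GPU up front removes the
# "using GPU N ... currently unknown" warning and the hang risk it describes.
local_rank = int(os.environ["LOCAL_RANK"])
torch.cuda.set_device(local_rank)
dist.init_process_group(backend="nccl",
                        device_id=torch.device(f"cuda:{local_rank}"))
```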
+ warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. 
+ warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. + warnings.warn( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly. 
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+ warnings.warn(
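Editor's note: the DeprecationWarning above says offload_weights no longer does anything in Transformer Engine's CPU-offload path. A sketch of requesting activation-only offload follows; the argument names follow transformer_engine.pytorch.get_cpu_offload_context as I understand it and should be treated as assumptions against your TE version.

    # Sketch: request CPU offload without the dead offload_weights knob.
    from transformer_engine.pytorch import get_cpu_offload_context

    offload_context, sync_function = get_cpu_offload_context(
        enabled=True,
        num_layers=2,              # layers whose activations may be offloaded
        model_layers=4,            # total transformer layers (assumed meaning)
        offload_activations=True,  # the path that still works
        offload_weights=False,     # True is a silent no-op per the warning
    )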
+[rank0]: Traceback (most recent call last):
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank0]:     pretrain(
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 879, in pretrain
+[rank0]:     save_checkpoint(
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 469, in save_checkpoint
+[rank0]:     async_save_request = dist_checkpointing.save(state_dict, checkpoint_name, save_strategy,
+[rank0]:                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 386, in save
+[rank0]:     common_strategy.save_common(state_dict, checkpoint_dir)
+[rank0]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/common.py", line 48, in save_common
+[rank0]:     torch.save(common_state_dict, path)
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/serialization.py", line 964, in save
+[rank0]:     with _open_zipfile_writer(f) as opened_zipfile:
+[rank0]:          ^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/serialization.py", line 828, in _open_zipfile_writer
+[rank0]:     return container(name_or_buffer)
+[rank0]:            ^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank0]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/serialization.py", line 792, in __init__
+[rank0]:     torch._C.PyTorchFileWriter(
+[rank0]: RuntimeError: Parent directory gpt-checkpoint/iter_0000010 does not exist.
+[rank0]:[W621 21:29:53.617051381 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. 
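Editor's note: the root cause above is torch.save being handed a path whose parent directory, gpt-checkpoint/iter_0000010, does not exist on this node; torch.save never creates directories. A minimal guard follows (ensure_parent_dir is a hypothetical helper for illustration, not Megatron's API). On multi-node runs the same symptom can also appear when only one node creates the directory and the save path is not on a shared filesystem; that is consistent with, but not proven by, this log.

    # Hypothetical guard: create the checkpoint's parent directory before saving.
    import os
    import torch

    def ensure_parent_dir(path):
        # Without this, torch.save raises
        # "RuntimeError: Parent directory ... does not exist."
        parent = os.path.dirname(path)
        if parent:
            os.makedirs(parent, exist_ok=True)

    state_dict = {"iteration": 10}
    path = "gpt-checkpoint/iter_0000010/common.pt"  # illustrative path only
    ensure_parent_dir(path)
    torch.save(state_dict, path)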
For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+W0621 21:29:59.126000 166537 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 166610 closing signal SIGTERM
+W0621 21:29:59.129000 166537 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 166611 closing signal SIGTERM
+W0621 21:29:59.131000 166537 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 166612 closing signal SIGTERM
+W0621 21:29:59.133000 166537 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 166613 closing signal SIGTERM
+W0621 21:29:59.151000 166537 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 166614 closing signal SIGTERM
+W0621 21:29:59.154000 166537 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 166615 closing signal SIGTERM
+W0621 21:29:59.156000 166537 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 166616 closing signal SIGTERM
+E0621 21:30:00.956000 166537 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 0 (pid: 166609) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent
+    raise ChildFailedError(
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time : 2025-06-21_21:29:59
+  host : fs-mbz-gpu-852
+  rank : 0 (local_rank: 0)
+  exitcode : 1 (pid: 166609)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
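Editor's note: the ProcessGroupNCCL warning after the rank-0 traceback says destroy_process_group() was never called before exit. A sketch of a crash-safe teardown follows; run_training() is a stand-in for this job's pretrain(...) entry point, and the gloo/single-process bootstrap is only so the sketch runs standalone (the real job uses nccl under torchrun).

    # Sketch: guarantee process-group teardown even on the exception path.
    import os
    import torch.distributed as dist

    def run_training():
        raise RuntimeError("stand-in for the failing save_checkpoint call")

    def main():
        # Single-process bootstrap; torchrun sets these for the real job.
        os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
        os.environ.setdefault("MASTER_PORT", "29501")
        dist.init_process_group(backend="gloo", rank=0, world_size=1)
        try:
            run_training()
        finally:
            # Runs even when training raises, avoiding the NCCL warning above.
            dist.destroy_process_group()

    if __name__ == "__main__":
        main()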
+[rank8]:[W621 21:30:01.066764902 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=75, addr=[fs-mbz-gpu-870]:50812, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+Exception raised from recvBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:678 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x15126cb785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x151255e5aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baae40 (0x151255e5ce40 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5bab74a (0x151255e5d74a in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::check(std::vector<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::allocator<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const&) + 0x2a9 (0x151255e571a9 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::ProcessGroupNCCL::heartbeatMonitor() + 0x379 (0x1512130509a9 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cuda.so)
+frame #6: <unknown function> + 0xd3b6d (0x151203019b6d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/../lib/libstdc++.so.6)
+frame #7: <unknown function> + 0x94ac3 (0x15126df83ac3 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #8: <unknown function> + 0x126850 (0x15126e015850 in /lib/x86_64-linux-gnu/libc.so.6)
+
+[rank8]:[W621 21:30:01.071168626 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 8] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank14]:[W621 21:30:01.066628982 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-870]:50814, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank14]:[W621 21:30:01.071320162 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 14] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
++ set +x
+[rank23]:[W621 21:30:01.721887301 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-881]:51252, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank21]:[W621 21:30:01.721889994 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-881]:51220, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank23]:[W621 21:30:01.726124516 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 23] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank21]:[W621 21:30:01.726151495 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 21] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank22]:[W621 21:30:01.723074182 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-881]:51210, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank22]:[W621 21:30:01.726893930 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 22] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank16]:[W621 21:30:01.774960388 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=75, addr=[fs-mbz-gpu-881]:51198, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank16]:[W621 21:30:01.778826431 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 16] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank15]:[W621 21:30:01.378340410 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-870]:50856, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank15]:[W621 21:30:01.382448755 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 15] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank12]:[W621 21:30:01.378638433 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-870]:50836, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank12]:[W621 21:30:01.382624201 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 12] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank13]:[W621 21:30:01.378449596 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-870]:50820, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank13]:[W621 21:30:01.383490751 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 13] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank10]:[W621 21:30:01.379143979 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-870]:50824, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank10]:[W621 21:30:01.383633212 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 10] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank9]:[W621 21:30:01.380187706 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-870]:50864, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank9]:[W621 21:30:01.383992215 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 9] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank11]:[W621 21:30:01.380436552 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=95, addr=[fs-mbz-gpu-870]:50840, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank11]:[W621 21:30:01.384327674 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 11] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+[rank24]:[W621 21:30:01.025047418 TCPStore.cpp:125] [c10d] recvValue failed on SocketImpl(fd=75, addr=[fs-mbz-gpu-901]:58566, remote=[fs-mbz-gpu-852]:39865): failed to recv, got 0 bytes
+[rank24]:[W621 21:30:01.029362418 ProcessGroupNCCL.cpp:1659] [PG ID 0 PG GUID 0(default_pg) Rank 24] Failed to check the "should dump" flag on TCPStore, (maybe TCPStore server has shut down too early), with error: failed to recv, got 0 bytes
+W0621 21:30:01.694000 2085247 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2085317 closing signal SIGTERM
+W0621 21:30:01.699000 2085247 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2085318 closing signal SIGTERM
+W0621 21:30:01.701000 2085247 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2085319 closing signal SIGTERM
+W0621 21:30:01.703000 2085247 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2085320 closing signal SIGTERM
+W0621 21:30:01.705000 2085247 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2085321 closing signal SIGTERM
+W0621 21:30:01.708000 2085247 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2085322 closing signal SIGTERM
+W0621 21:30:01.711000 2085247 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2085323 closing signal SIGTERM
+W0621 21:30:01.732000 2085247 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 2085324 closing signal SIGTERM
+W0621 21:30:01.786000 3386426 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3386496 closing signal SIGTERM
+W0621 21:30:01.789000 3386426 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3386497 closing signal SIGTERM
+W0621 21:30:01.792000 3386426 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3386498 closing signal SIGTERM
+W0621 21:30:01.794000 3386426 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3386499 closing signal SIGTERM
+W0621 21:30:01.797000 3386426 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3386500 closing signal SIGTERM
+W0621 21:30:01.800000 3386426 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3386501 closing signal SIGTERM
+W0621 21:30:01.803000 3386426 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3386502 closing signal SIGTERM
+W0621 21:30:01.806000 3386426 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3386503 closing signal SIGTERM
remote=[fs-mbz-gpu-852]:29500): failed to recv, got 0 bytes +Exception raised from recvBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:678 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x15158d3785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x15157665aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa0d0 (0x15157665c0d0 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5baa81d (0x15157665c81d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: + 0x5bab4a9 (0x15157665d4a9 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::compareSet(std::__cxx11::basic_string, std::allocator > const&, std::vector > const&, std::vector > const&) + 0x1fb (0x1515766574cb in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: + 0xc0f919 (0x15158598b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #7: + 0x37f17d (0x1515850fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #16: + 0x94ac3 (0x15158e73dac3 in /lib/x86_64-linux-gnu/libc.so.6) +frame #17: + 0x126850 (0x15158e7cf850 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:30:01.846000 3455328 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1341] The node 'fs-mbz-gpu-870_3455328_0' has failed to send a keep-alive heartbeat to the rendezvous '343213' due to an error of type RendezvousConnectionError. 
+[W621 21:30:01.774761933 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:49270, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x15158d3785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x15157665aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x15157665c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x15157665db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::compareSet(std::__cxx11::basic_string, std::allocator > const&, std::vector > const&, std::vector > const&) + 0x299 (0x151576657569 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: + 0xc0f919 (0x15158598b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #6: + 0x37f17d (0x1515850fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #24: + 0x29d90 (0x15158e6d2d90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #25: __libc_start_main + 0x80 (0x15158e6d2e40 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:30:01.869000 3455328 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3455397 closing signal SIGTERM +W0621 21:30:01.873000 3455328 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3455398 closing signal SIGTERM +W0621 21:30:01.876000 3455328 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3455399 closing signal SIGTERM +W0621 21:30:01.878000 3455328 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3455400 closing signal SIGTERM +W0621 21:30:01.881000 3455328 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3455401 closing signal SIGTERM +W0621 21:30:01.904000 3455328 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3455402 closing signal SIGTERM +W0621 21:30:01.907000 3455328 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3455403 closing signal SIGTERM +W0621 21:30:01.914000 3455328 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 3455404 closing signal SIGTERM +[W621 21:30:02.025475946 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-901]:44556, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x1516acb785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x151695a5aafe in 
/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x151695a5c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x151695a5db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::doWait(c10::ArrayRef, std::allocator > >, std::chrono::duration >) + 0x1a6 (0x151695a57ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string, std::allocator > const&) + 0x33 (0x151695a57ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: c10d::TCPStore::get(std::__cxx11::basic_string, std::allocator > const&) + 0xab (0x151695a58f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #7: + 0xc0f526 (0x1516a4d8b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #8: + 0x37f17d (0x1516a44fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #17: + 0x94ac3 (0x1516adc36ac3 in /lib/x86_64-linux-gnu/libc.so.6) +frame #18: + 0x126850 (0x1516adcc8850 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:30:02.593000 2085247 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1341] The node 'fs-mbz-gpu-901_2085247_0' has failed to send a keep-alive heartbeat to the rendezvous '343213' due to an error of type RendezvousConnectionError. 
+[W621 21:30:02.120564271 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:57010, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x14f17cf785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14f16625aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14f16625c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x14f16625db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::doWait(c10::ArrayRef, std::allocator > >, std::chrono::duration >) + 0x1a6 (0x14f166257ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string, std::allocator > const&) + 0x33 (0x14f166257ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: c10d::TCPStore::get(std::__cxx11::basic_string, std::allocator > const&) + 0xab (0x14f166258f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #7: + 0xc0f526 (0x14f17558b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #8: + 0x37f17d (0x14f174cfb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #17: + 0x94ac3 (0x14f17e28dac3 in /lib/x86_64-linux-gnu/libc.so.6) +frame #18: + 0x126850 (0x14f17e31f850 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:30:02.763000 3386426 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1341] The node 'fs-mbz-gpu-881_3386426_0' has failed to send a keep-alive heartbeat to the rendezvous '343213' due to an error of type RendezvousConnectionError. 
+[W621 21:30:04.292822417 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:49270, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x15158d3785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x15157665aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x15157665c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x15157665db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::compareSet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&) + 0x299 (0x151576657569 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: <unknown function> + 0xc0f919 (0x15158598b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #6: <unknown function> + 0x37f17d (0x1515850fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+
+frame #24: <unknown function> + 0x29d90 (0x15158e6d2d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #25: __libc_start_main + 0x80 (0x15158e6d2e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+[W621 21:30:04.814767413 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-901]:44556, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x1516acb785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x151695a5aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x151695a5c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x151695a5db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::doWait(c10::ArrayRef<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::chrono::duration<long, std::ratio<1l, 1000l> >) + 0x1a6 (0x151695a57ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0x33 (0x151695a57ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #6: c10d::TCPStore::get(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xab (0x151695a58f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #7: <unknown function> + 0xc0f526 (0x1516a4d8b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #8: <unknown function> + 0x37f17d (0x1516a44fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+
+frame #26: <unknown function> + 0x29d90 (0x1516adbcbd90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #27: __libc_start_main + 0x80 (0x1516adbcbe40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:30:04.387000 3455328 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3455328_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
+W0621 21:30:04.389000 2085247 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2085247_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
+[W621 21:30:04.305867854 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:49270, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x15158d3785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x15157665aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x15157665c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x15157665db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::compareSet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&, std::vector<unsigned char, std::allocator<unsigned char> > const&) + 0x299 (0x151576657569 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: <unknown function> + 0xc0f919 (0x15158598b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #6: <unknown function> + 0x37f17d (0x1515850fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+
+frame #24: <unknown function> + 0x29d90 (0x15158e6d2d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #25: __libc_start_main + 0x80 (0x15158e6d2e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+[W621 21:30:04.830049973 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-901]:44556, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x1516acb785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x151695a5aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x151695a5c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x151695a5db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::doWait(c10::ArrayRef<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::chrono::duration<long, std::ratio<1l, 1000l> >) + 0x1a6 (0x151695a57ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0x33 (0x151695a57ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #6: c10d::TCPStore::get(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xab (0x151695a58f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #7: <unknown function> + 0xc0f526 (0x1516a4d8b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #8: <unknown function> + 0x37f17d (0x1516a44fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+
+frame #26: <unknown function> + 0x29d90 (0x1516adbcbd90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #27: __libc_start_main + 0x80 (0x1516adbcbe40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:30:04.397000 3455328 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3455328_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
+Traceback (most recent call last):
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line 117, in _call_store
+    return getattr(self._store, store_op)(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+torch.distributed.DistNetworkError: Broken pipe
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 261, in launch_agent
+    result = agent.run()
+             ^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/metrics/api.py", line 138, in wrapper
+    result = f(*args, **kwargs)
+             ^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/agent/server/api.py", line 711, in run
+    result = self._invoke_run(role)
+             ^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/agent/server/api.py", line 906, in _invoke_run
+    num_nodes_waiting = rdzv_handler.num_nodes_waiting()
+                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 1263, in num_nodes_waiting
+    self._state_holder.sync()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 423, in sync
+    set_response = self._backend.set_state(state_bits, self._token)
+                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line 100, in set_state
+    base64_state: bytes = self._call_store(
+                          ^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line 119, in _call_store
+    raise RendezvousConnectionError(
+torch.distributed.elastic.rendezvous.api.RendezvousConnectionError: The connection to the C10d store has failed. See inner exception for details.
+W0621 21:30:04.401000 2085247 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2085247_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
+Traceback (most recent call last):
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line 117, in _call_store
+    return getattr(self._store, store_op)(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+torch.distributed.DistNetworkError: failed to recv, got 0 bytes
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 261, in launch_agent
+    result = agent.run()
+             ^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/metrics/api.py", line 138, in wrapper
+    result = f(*args, **kwargs)
+             ^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/agent/server/api.py", line 711, in run
+    result = self._invoke_run(role)
+             ^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/agent/server/api.py", line 906, in _invoke_run
+    num_nodes_waiting = rdzv_handler.num_nodes_waiting()
+                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 1263, in num_nodes_waiting
+    self._state_holder.sync()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 437, in sync
+    get_response = self._backend.get_state()
+                   ^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line 75, in get_state
+    base64_state: bytes = self._call_store("get", self._key)
+                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line 119, in _call_store
+    raise RendezvousConnectionError(
+torch.distributed.elastic.rendezvous.api.RendezvousConnectionError: The connection to the C10d store has failed. See inner exception for details.
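Both tracebacks end the same way: the low-level DistNetworkError from a store operation surfaces inside _call_store and is re-raised as RendezvousConnectionError. A minimal sketch of that wrapping pattern, with names mirroring c10d_rendezvous_backend.py but written here purely for illustration, not copied from the torch source:

    # Illustrative shape of the error wrapping seen in the tracebacks above.
    from torch.distributed import DistNetworkError
    from torch.distributed.elastic.rendezvous.api import RendezvousConnectionError

    def call_store(store, store_op, *args, **kwargs):
        try:
            # store_op is e.g. "get" or "compare_set", as in the frames above
            return getattr(store, store_op)(*args, **kwargs)
        except DistNetworkError as exc:
            raise RendezvousConnectionError(
                "The connection to the C10d store has failed."
            ) from exc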
+[W621 21:30:04.062081369 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:57010, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x14f17cf785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x14f16625aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x14f16625c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x14f16625db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::doWait(c10::ArrayRef<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::chrono::duration<long, std::ratio<1l, 1000l> >) + 0x1a6 (0x14f166257ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0x33 (0x14f166257ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #6: c10d::TCPStore::get(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xab (0x14f166258f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #7: <unknown function> + 0xc0f526 (0x14f17558b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #8: <unknown function> + 0x37f17d (0x14f174cfb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+
+frame #26: <unknown function> + 0x29d90 (0x14f17e222d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #27: __libc_start_main + 0x80 (0x14f17e222e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:30:04.704000 3386426 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-881_3386426_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
+[W621 21:30:04.078244466 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:57010, remote=[fs-mbz-gpu-852]:29500): Broken pipe
+Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first):
+frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x98 (0x14f17cf785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so)
+frame #1: <unknown function> + 0x5ba8afe (0x14f16625aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #2: <unknown function> + 0x5baa358 (0x14f16625c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #3: <unknown function> + 0x5babb3e (0x14f16625db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #4: c10d::TCPStore::doWait(c10::ArrayRef<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::chrono::duration<long, std::ratio<1l, 1000l> >) + 0x1a6 (0x14f166257ac6 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #5: c10d::TCPStore::doGet(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0x33 (0x14f166257ea3 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #6: c10d::TCPStore::get(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xab (0x14f166258f8b in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so)
+frame #7: <unknown function> + 0xc0f526 (0x14f17558b526 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+frame #8: <unknown function> + 0x37f17d (0x14f174cfb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so)
+
+frame #26: <unknown function> + 0x29d90 (0x14f17e222d90 in /lib/x86_64-linux-gnu/libc.so.6)
+frame #27: __libc_start_main + 0x80 (0x14f17e222e40 in /lib/x86_64-linux-gnu/libc.so.6)
+
+W0621 21:30:04.718000 3386426 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-881_3386426_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
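The same trace appears twice for fs-mbz-gpu-881 about 16 ms apart, consistent with the agent retrying its store read during shutdown before giving up. A bounded-retry sketch, with a hypothetical helper name and arbitrary attempt/delay values chosen for illustration:

    # Hypothetical retry helper around TCPStore.get; not part of torch or this job.
    import time
    from torch.distributed import DistNetworkError

    def get_with_retry(store, key, attempts=3, delay_s=1.0):
        for i in range(attempts):
            try:
                return store.get(key)
            except DistNetworkError:
                if i == attempts - 1:
                    raise          # give up, as the agent eventually does here
                time.sleep(delay_s)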
++ set +x
++ set +x
+Traceback (most recent call last):
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line 117, in _call_store
+    return getattr(self._store, store_op)(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+torch.distributed.DistNetworkError: failed to recv, got 0 bytes
+
+The above exception was the direct cause of the following exception:
+
+Traceback (most recent call last):
+  File "<frozen runpy>", line 198, in _run_module_as_main
+  File "<frozen runpy>", line 88, in _run_code
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 207, in <module>
+    main()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+    return arg(*args, **kwargs)
+           ^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 203, in main
+    launch(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py", line 188, in launch
+    run(args)
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/run.py", line 883, in run
+    elastic_launch(
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 139, in __call__
+    return launch_agent(self._config, self._entrypoint, list(args))
+           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 261, in launch_agent
+    result = agent.run()
+             ^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/metrics/api.py", line 138, in wrapper
+    result = f(*args, **kwargs)
+             ^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/agent/server/api.py", line 711, in run
+    result = self._invoke_run(role)
+             ^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/agent/server/api.py", line 906, in _invoke_run
+    num_nodes_waiting = rdzv_handler.num_nodes_waiting()
+                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 1263, in num_nodes_waiting
+    self._state_holder.sync()
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py", line 437, in sync
+    get_response = self._backend.get_state()
+                   ^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line 75, in get_state
+    base64_state: bytes = self._call_store("get", self._key)
+                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/elastic/rendezvous/c10d_rendezvous_backend.py", line
119, in _call_store + raise RendezvousConnectionError( +torch.distributed.elastic.rendezvous.api.RendezvousConnectionError: The connection to the C10d store has failed. See inner exception for details. ++ set +x ++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072 ++ export PROF_CTX_LENGTH=24576 ++ PROF_CTX_LENGTH=24576 ++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L24576*tp4.cp8.bs1.json' ++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L24576*tp4.cp8.bs1.json' ']' ++ echo 'Running ctx_length=24576, TP_SIZE=4, CP_SIZE=8, BATCH_SIZE=1' ++ srun bash ./attnserver.sh ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 24576 --max-position-embeddings 24576 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 24576 --max-position-embeddings 24576 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 24576 --max-position-embeddings 24576 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ ++ which python3 ++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 24576 
--max-position-embeddings 24576 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/ +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated +and will be removed in future. Use torchrun. +Note that --use-env is set by default in torchrun. +If your script expects `--local-rank` argument to be set, please +change it to read from `os.environ['LOCAL_RANK']` instead. See +https://pytorch.org/docs/stable/distributed.html#launch-utility for +further instructions + + main() +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated +and will be removed in future. Use torchrun. +Note that --use-env is set by default in torchrun. +If your script expects `--local-rank` argument to be set, please +change it to read from `os.environ['LOCAL_RANK']` instead. See +https://pytorch.org/docs/stable/distributed.html#launch-utility for +further instructions + + main() +W0621 21:30:07.928000 169552 site-packages/torch/distributed/run.py:766] +W0621 21:30:07.928000 169552 site-packages/torch/distributed/run.py:766] ***************************************** +W0621 21:30:07.928000 169552 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed. +W0621 21:30:07.928000 169552 site-packages/torch/distributed/run.py:766] ***************************************** +W0621 21:30:07.927000 2088172 site-packages/torch/distributed/run.py:766] +W0621 21:30:07.927000 2088172 site-packages/torch/distributed/run.py:766] ***************************************** +W0621 21:30:07.927000 2088172 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed. +W0621 21:30:07.927000 2088172 site-packages/torch/distributed/run.py:766] ***************************************** +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated +and will be removed in future. Use torchrun. +Note that --use-env is set by default in torchrun. +If your script expects `--local-rank` argument to be set, please +change it to read from `os.environ['LOCAL_RANK']` instead. 
See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+ main()
+W0621 21:30:07.959000 3458403 site-packages/torch/distributed/run.py:766]
+W0621 21:30:07.959000 3458403 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:30:07.959000 3458403 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:30:07.959000 3458403 site-packages/torch/distributed/run.py:766] *****************************************
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+
+ main()
+W0621 21:30:07.988000 3389499 site-packages/torch/distributed/run.py:766]
+W0621 21:30:07.988000 3389499 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:30:07.988000 3389499 site-packages/torch/distributed/run.py:766] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0621 21:30:07.988000 3389499 site-packages/torch/distributed/run.py:766] *****************************************
+[rank20]:[W621 21:30:30.559010367 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 20] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank2]:[W621 21:30:30.220529382 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 2] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank6]:[W621 21:30:30.220901916 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 6] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank4]:[W621 21:30:30.223314535 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 4] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank28]:[W621 21:30:30.635190800 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 28] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank30]:[W621 21:30:30.635613300 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 30] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank26]:[W621 21:30:30.635669245 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 26] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank27]:[W621 21:30:30.635679205 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 27] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank7]:[W621 21:30:30.227717209 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 7] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank31]:[W621 21:30:30.636227911 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 31] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank3]:[W621 21:30:30.229021228 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 3] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank29]:[W621 21:30:30.636392332 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 29] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank25]:[W621 21:30:30.637895067 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 25] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank1]:[W621 21:30:30.229945691 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 1] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank5]:[W621 21:30:30.229943611 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 5] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank21]:[W621 21:30:30.571913929 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 21] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank22]:[W621 21:30:30.572022987 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 22] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank18]:[W621 21:30:30.572063985 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 18] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank19]:[W621 21:30:30.572167185 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 19] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank23]:[W621 21:30:30.572171815 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 23] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank17]:[W621 21:30:30.572349455 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 17] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank12]:[W621 21:30:30.129121607 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 12] using GPU 4 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank15]:[W621 21:30:30.129200936 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 15] using GPU 7 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank10]:[W621 21:30:30.129209185 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 10] using GPU 2 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank11]:[W621 21:30:30.129217791 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 11] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank14]:[W621 21:30:30.129217948 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 14] using GPU 6 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank13]:[W621 21:30:30.129241187 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 13] using GPU 5 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank9]:[W621 21:30:30.129352100 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 9] using GPU 1 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank16]:[W621 21:30:30.663816090 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 16] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank24]:[W621 21:30:30.740741353 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 24] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank8]:[W621 21:30:30.244416426 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 8] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[rank0]:[W621 21:30:30.420635996 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 0] using GPU 0 as device used by this process is currently unknown. This can potentially cause a hang if this rank to GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
+  warnings.warn(
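The two warnings above suggest concrete fixes: the FutureWarning asks for a move to torchrun with `LOCAL_RANK` read from the environment, and the per-rank NCCL warning asks for an explicit device_id in init_process_group(). A hedged sketch of both, assuming a recent torch build that accepts the device_id keyword; the torchrun line in the comment mirrors this job's rendezvous flags and is illustrative:

    # Equivalent torchrun invocation, per the FutureWarning (one line per node):
    #   torchrun --nproc_per_node 8 --nnodes 4 --rdzv_id 343213 \
    #            --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 \
    #            ./pretrain_gpt_profile.py ...
    import os
    import torch
    import torch.distributed as dist

    local_rank = int(os.environ["LOCAL_RANK"])   # set by torchrun / the launcher
    torch.cuda.set_device(local_rank)
    dist.init_process_group(
        backend="nccl",
        device_id=torch.device(f"cuda:{local_rank}"),  # makes the rank-to-GPU mapping explicit
    )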
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+  warnings.warn(
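Each of the 32 ranks emits this deprecation text once per process, so it floods the stderr stream. If the repetition is unwanted, the standard-library warnings filter can limit it; a minimal sketch using only stdlib calls:

    # Print each matching DeprecationWarning at most once per process.
    import warnings

    warnings.filterwarnings(
        "once",
        message=r"Offloading weights is deprecated.*",
        category=DeprecationWarning,
    )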
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead.
+  checkpoint.load_state_dict(
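This FutureWarning asks callers to move from torch.distributed.checkpoint.load_state_dict to torch.distributed.checkpoint.load. A hedged migration sketch; the `model` variable and the checkpoint path are illustrative placeholders, not taken from this job:

    # Migration sketch for the `load_state_dict` FutureWarning above.
    import torch.distributed.checkpoint as dcp

    state_dict = {"model": model.state_dict()}            # assumes an existing `model`
    dcp.load(state_dict, checkpoint_id="gpt-checkpoint")  # replaces dcp.load_state_dict(...)
    model.load_state_dict(state_dict["model"])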
+ checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. 
Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. 
+ checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. 
+ device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py:915: FutureWarning: `load_state_dict` is deprecated and will be removed in future versions. Please use `load` instead. + checkpoint.load_state_dict( +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. + device = getattr(value, "device", None) +/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/planner_helpers.py:406: FutureWarning: Please use DTensor instead and we are deprecating ShardedTensor. 
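Two of the warnings above point at the same migration: the torch.distributed.checkpoint load_state_dict entry point (called from megatron/core/dist_checkpointing/strategies/torch.py:915) is deprecated in favor of load. A minimal sketch of the replacement call, assuming a recent PyTorch; the checkpoint directory and the helper name load_with_modern_api are placeholders, not values from this job:

import torch.distributed.checkpoint as dcp
from torch.distributed.checkpoint import FileSystemReader

def load_with_modern_api(state_dict, ckpt_dir="/path/to/checkpoint"):
    # dcp.load() is the documented replacement for the deprecated
    # dcp.load_state_dict() named in the FutureWarning above; it loads
    # in place into the pre-built (sharded) state_dict.
    dcp.load(state_dict, storage_reader=FileSystemReader(ckpt_dir))
    return state_dict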
+[rank30]: Traceback (most recent call last):
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank30]:     pretrain(
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank30]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank30]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank30]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank30]:                                                                 ^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank30]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank30]:                                                       ^^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank30]:     return _load_global_dist_base_checkpoint(
+[rank30]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank30]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank30]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank30]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank30]:                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank30]:     checkpoint.load_state_dict(
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank30]:     return arg(*args, **kwargs)
+[rank30]:            ^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank30]:     return _load_state_dict(
+[rank30]:            ^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank30]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank30]:                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank30]:     raise result
+[rank30]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+[rank30]: Traceback (most recent call last): (RANK 0)
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank30]:     local_data = map_fun()
+[rank30]:                  ^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank30]:     result = func(*args, **kwargs)
+[rank30]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank30]:     local_plan = planner.create_local_plan()
+[rank30]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank30]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank30]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank30]:     raise CheckpointingException(_msg)
+[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
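Every rank fails the same shape check: the checkpoint stores embedding.position_embeddings.weight with global shape (40960, 4096), while the relaunched model allocates (24576, 4096), which is consistent with the job being restarted with a smaller --seq-length/--max-position-embeddings than the checkpoint was saved with. A minimal standalone sketch of the validation semantics (illustrative only, not Megatron's actual _validate_global_shapes implementation):

import torch

class CheckpointingException(Exception):
    # Stands in for megatron.core.dist_checkpointing.core.CheckpointingException.
    pass

def validate_global_shape(key, loaded, expected):
    # The global (unsharded) shape recorded in the checkpoint metadata must
    # match the shape the model being restored allocates for the same key.
    if tuple(loaded) != tuple(expected):
        raise CheckpointingException(
            f"Global shape mismatch for loaded ({loaded}) and "
            f"expected ({expected}) tensor for key {key}"
        )

# Reproduces the message seen on all 32 ranks above.
try:
    validate_global_shape(
        "embedding.position_embeddings.weight",
        loaded=torch.Size([40960, 4096]),
        expected=(24576, 4096),
    )
except CheckpointingException as exc:
    print(exc)

If that reading is right, the restart either needs --max-position-embeddings matching the checkpoint (40960) or a checkpoint converted to the new shape.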
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 5) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 6) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 6) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 7) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 6) +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 7) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 7) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: Traceback (most recent call last): (RANK 8) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 7) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: Traceback (most recent call last): (RANK 8) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 8) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank10]: Traceback (most recent call last): (RANK 8) +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 9) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ 
+[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: 
Traceback (most recent call last): (RANK 9) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 9) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 10) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank2]: Traceback (most recent call last): (RANK 9) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 10) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 10) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 10) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 11) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 11) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 11) +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 12) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank2]: Traceback (most recent call last): (RANK 11) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 13) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 12) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 12) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 12) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 13) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 13) +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 13) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 14) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 15) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 14) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 14) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 14) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 15) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 15) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank30]: raise CheckpointingException(_msg) +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: 
Traceback (most recent call last): (RANK 15) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 16) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: raise CheckpointingException(_msg) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 16) +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank10]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 17) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 16) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 16) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^ +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 17) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 17) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 18) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: Traceback (most recent call last): (RANK 17) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 19) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 18) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 18) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 18) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 19) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 19) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 20) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 20) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: Traceback (most recent call last): (RANK 19) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise 
CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 20) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: Traceback (most recent call last): (RANK 21) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 20) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: Traceback (most recent call last): (RANK 21) +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: Traceback (most recent call last): (RANK 21) +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 22) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 21) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected 
((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 22) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 22) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 23) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank2]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 22) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 23) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 23) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 24) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 23) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: result = func(*args, **kwargs) 
+[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 24) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 24) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 24) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 25) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 26) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 25) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 25) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 26) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank2]: Traceback (most recent call last): (RANK 25) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 26) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 26) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 27) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 28) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 27) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 27) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 28) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank2]: Traceback (most recent call last): (RANK 27) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 28) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank30]: raise CheckpointingException(_msg) +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 28) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: ^^^^^^^^^ +[rank10]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 29) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank23]: raise CheckpointingException(_msg) +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: raise CheckpointingException(_msg) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 30) 
+[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 29) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 29) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 29) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 30) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 30) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank30]: Traceback (most recent call last): (RANK 31) +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank30]: local_data = map_fun() +[rank30]: ^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank30]: result = 
func(*args, **kwargs) +[rank30]: ^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank23]: local_plan = planner.create_local_plan() +[rank23]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank23]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank23]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank2]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank2]: Traceback (most recent call last): (RANK 30) +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank2]: local_data = map_fun() +[rank2]: ^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank10]: local_plan = planner.create_local_plan() +[rank10]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank10]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank10]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: local_plan = planner.create_local_plan() +[rank30]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank30]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank30]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank30]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank30]: raise CheckpointingException(_msg) +[rank30]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +[rank28]: Traceback (most recent call last): +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank28]: pretrain( +[rank23]: raise CheckpointingException(_msg) +[rank23]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank23]: Traceback (most recent call last): (RANK 31) +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank23]: local_data = map_fun() +[rank23]: ^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank23]: result = func(*args, **kwargs) +[rank23]: ^^^^^^^^^^^^^^^^^^^^^ +[rank23]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: result = func(*args, **kwargs) +[rank2]: ^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank2]: local_plan = planner.create_local_plan() +[rank2]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank2]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank2]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank2]: raise CheckpointingException(_msg) +[rank10]: raise CheckpointingException(_msg) +[rank10]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank10]: Traceback (most recent call last): (RANK 31) +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank10]: local_data = map_fun() +[rank10]: ^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank10]: result = func(*args, **kwargs) +[rank10]: ^^^^^^^^^^^^^^^^^^^^^ +[rank10]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank28]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank28]: 
+
+[rank28]: Traceback (most recent call last):
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank28]:     pretrain(
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank28]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank28]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank28]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank28]:                                                                 ^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank28]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank28]:                                                       ^^^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank28]:     return _load_global_dist_base_checkpoint(
+[rank28]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank28]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank28]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank28]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank28]:                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank28]:     checkpoint.load_state_dict(
+[rank28]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank28]:     return arg(*args, **kwargs)
+[rank28]:            ^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank28]:     return _load_state_dict(
+[rank28]:            ^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank28]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank28]:                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank28]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank28]:     raise result
+[rank28]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
[... ranks 0, 8, 19 and 28 each emit this same top-level traceback, then re-print the create_local_plan / _validate_global_shapes traceback shown earlier once per collective sub-rank, labeled (RANK 0) onward; the dump continues in that pattern ...]
CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 2) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 3) +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 4) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: Traceback (most recent call last): (RANK 3) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 3) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 4) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: ^^^^^^^^^ +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 4) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank28]: Traceback (most recent call last): (RANK 5) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 4) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 5) +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 6) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: Traceback (most recent call last): (RANK 5) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 5) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 6) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 6) +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 7) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank0]: raise CheckpointingException(_msg) +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 7) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: 
^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 6) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 7) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 8) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File 
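+ Editorial note: the "loaded" side of the mismatch can be confirmed offline by reading the checkpoint metadata with torch.distributed.checkpoint. A minimal sketch, assuming a local DCP-format checkpoint directory (the path below is a placeholder, not taken from this log; the key name is copied from the error message, and the exact key layout depends on Megatron's sharded state dict):
+
+     # inspect_ckpt.py -- print the global shape the checkpoint stores for the offending key
+     import torch.distributed.checkpoint as dcp
+
+     reader = dcp.FileSystemReader("/path/to/checkpoint/iter_N")  # hypothetical path
+     metadata = reader.read_metadata()
+     # state_dict_metadata maps tensor keys to TensorStorageMetadata, which carries a .size
+     info = metadata.state_dict_metadata["embedding.position_embeddings.weight"]
+     print(info.size)  # for this checkpoint this should print torch.Size([40960, 4096])
+
+ If the printed size matches the "loaded" shape in the exception, the 40960-row table lives in the checkpoint and the 24576-row expectation comes from the model being built at load time.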
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 7) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line[rank24]: Traceback (most recent call last): +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank24]: pretrain( +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank24]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank24]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 8) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 8) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank24]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank24]: return _load_global_dist_base_checkpoint( +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank24]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line[rank20]: Traceback (most recent call last): +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank20]: pretrain( +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank20]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank20]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank20]: ^^^^^^^^^^^^^^^^ +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 8) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/minicond[rank1]: Traceback (most recent call last): +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank24]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank24]: checkpoint.load_state_dict( +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank24]: return arg(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank24]: return _load_state_dict( +[rank24]: ^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank20]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank20]: return _load_global_dist_base_checkpoint( +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank20]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank1]: pretrain( +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 9) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank24]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank24]: raise result +[rank24]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank24]: Traceback (most recent call last): (RANK 0) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank20]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank20]: checkpoint.load_state_dict( +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank20]: return arg(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank20]: return _load_state_dict( +[rank20]: ^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank1]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank1]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank1]: ^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank1]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 10) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: re[rank26]: Traceback (most recent call last): +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank26]: pretrain( +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank26]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank20]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank20]: raise result +[rank20]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank20]: Traceback (most recent call last): (RANK 0) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank1]: return _load_global_dist_base_checkpoint( +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank1]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank1]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank26]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank26]: ^^^^^^^^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank26]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank26]: return _load_global_dist_base_checkpoint( +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: re[rank17]: Traceback (most recent call last): +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank17]: pretrain( +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank17]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank1]: checkpoint.load_state_dict( +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank1]: return arg(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank1]: return _load_state_dict( +[rank1]: ^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank1]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank26]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank26]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank26]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank26]: checkpoint.load_state_dict( +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank17]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank17]: ^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank17]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank17]: return _load_global_dist_base_checkpoint( +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank1]: raise result +[rank1]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank1]: Traceback (most recent call last): (RANK 0) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: Traceback (most recent call last): (RANK 11) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/li[rank15]: Traceback (most recent call last): +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank15]: pretrain( +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank15]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank15]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank15]: ^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank26]: return arg(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank26]: return _load_state_dict( +[rank26]: ^^^^^^^^^^^^^^^^^ 
+[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank17]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank17]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank17]: checkpoint.load_state_dict( +[rank1]: result = func(*args, **kwargs) +[rank1]: [rank7]: Traceback (most recent call last): +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank7]: pretrain( +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank15]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank15]: return _load_global_dist_base_checkpoint( +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank15]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank26]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank26]: raise result +[rank26]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank26]: Traceback (most recent call last): (RANK 0) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank17]: return arg(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank17]: return _load_state_dict( +[rank17]: ^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank7]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank7]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank7]: ^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank7]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank15]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank15]: checkpoint.load_state_dict( +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank15]: return arg(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank15]: return _load_state_dict( +[rank15]: ^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: re 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank17]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank17]: raise result +[rank17]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank17]: Traceback (most recent call last): (RANK 0) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank7]: return _load_global_dist_base_checkpoint( +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 
870, in _load_global_dist_base_checkpoint +[rank7]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank7]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank15]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank15]: raise result +[rank15]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank15]: Traceback (most recent call last): (RANK 0) +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 9) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: re[rank16]: Traceback (most recent call last): +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank16]: pretrain( +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank16]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank7]: checkpoint.load_state_dict( +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank7]: 
return arg(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank7]: return _load_state_dict( +[rank7]: ^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank7]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: re[rank9]: Traceback (most recent call last): +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank9]: pretrain( +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank9]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank16]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank16]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank16]: return _load_global_dist_base_checkpoint( +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank7]: raise result +[rank7]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank7]: Traceback (most recent call last): (RANK 0) +[rank7]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank9]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank9]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank9]: return _load_global_dist_base_checkpoint( +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 10) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank16]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank16]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank16]: checkpoint.load_state_dict( +[rank7]: result = func(*args, **kwargs) +[rank7]: [rank3]: Traceback (most recent call last): +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank3]: pretrain( +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank9]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank9]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank9]: checkpoint.load_state_dict( +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank16]: return arg(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank16]: return _load_state_dict( +[rank16]: ^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank3]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank3]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank3]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank9]: return arg(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank9]: return _load_state_dict( +[rank9]: ^^^^^^^^^^^^^^^^^ +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24[rank27]: Traceback (most recent call last): +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank27]: pretrain( +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank27]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank16]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, 
global_step) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank16]: raise result +[rank16]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank16]: Traceback (most recent call last): (RANK 0) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank3]: return _load_global_dist_base_checkpoint( +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank3]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank3]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank9]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank9]: raise result +[rank9]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank9]: Traceback (most recent call last): (RANK 0) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank27]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank27]: ^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank27]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank27]: return _load_global_dist_base_checkpoint( +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: re 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank3]: checkpoint.load_state_dict( +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank3]: return arg(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank3]: return _load_state_dict( +[rank3]: ^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank3]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: [rank13]: Traceback (most recent call last): +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank13]: pretrain( +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank13]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank27]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank27]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank27]: checkpoint.load_state_dict( +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 
4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 9) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank3]: raise result +[rank3]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank3]: Traceback (most recent call last): (RANK 0) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank13]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank13]: ^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank13]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank13]: return _load_global_dist_base_checkpoint( +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank27]: return arg(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^ +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank3]: result = func(*args, **kwargs) +[rank3]: [rank4]: Traceback (most recent call last): +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank4]: pretrain( +[rank13]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank13]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank13]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank13]: checkpoint.load_state_dict( +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank27]: return _load_state_dict( +[rank27]: ^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank27]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank27]: raise result +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 10) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank4]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank4]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank4]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank13]: return arg(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank13]: return _load_state_dict( +[rank27]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank27]: Traceback (most recent call last): (RANK 0) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", 
line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: re[rank25]: Traceback (most recent call last): +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank25]: pretrain( +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank4]: return _load_global_dist_base_checkpoint( +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank4]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank4]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: ^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank13]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank13]: raise result +[rank13]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank13]: Traceback (most recent call last): (RANK 0) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank25]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank25]: args.iteration, 
args.num_floating_point_operations_so_far = load_checkpoint( +[rank25]: ^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank25]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24[rank22]: Traceback (most recent call last): +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank22]: pretrain( +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank22]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank4]: checkpoint.load_state_dict( +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank4]: return arg(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank4]: return _load_state_dict( +[rank4]: ^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank4]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: reb/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank25]: return _load_global_dist_base_checkpoint( +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank25]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank25]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank25]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank22]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank22]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank22]: return _load_global_dist_base_checkpoint( +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank4]: raise result +[rank4]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank4]: Traceback (most recent call last): (RANK 0) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 12) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank25]: checkpoint.load_state_dict( +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank25]: return arg(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^ +[rank22]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank22]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank22]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank22]: checkpoint.load_state_dict( +[rank4]: result = func(*args, **kwargs) +[rank4]: a/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank25]: return _load_state_dict( +[rank25]: ^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank25]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank25]: raise result +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank22]: return arg(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^ +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 9) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank25]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank25]: Traceback (most recent call last): (RANK 0) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank22]: return _load_state_dict( +[rank22]: ^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank22]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank22]: raise result +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+[rank21]: Traceback (most recent call last):
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank21]:     pretrain(
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank21]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank21]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank21]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank21]:     return _load_global_dist_base_checkpoint(
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank21]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank21]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank21]:     checkpoint.load_state_dict(
+[rank21]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank21]:     return arg(*args, **kwargs)
+[rank21]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank21]:     return _load_state_dict(
+[rank21]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank21]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank21]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank21]:     raise result
+[rank21]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+[rank21]: Traceback (most recent call last): (RANK 0)
+[rank21]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank21]:     local_data = map_fun()
+[rank21]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank21]:     result = func(*args, **kwargs)
+[rank21]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank21]:     local_plan = planner.create_local_plan()
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank21]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank21]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank21]:     raise CheckpointingException(_msg)
+[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+[the (RANK 0) sub-traceback above repeats verbatim as (RANK 1) through (RANK 31), and the identical CheckpointException is emitted, interleaved, by each of the 32 ranks; the duplicates are collapsed here]
+[rank3]: Traceback (most recent call last): (RANK 1) +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 2) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", 
line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 4) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 2) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 3) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: raise 
CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 2) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 5) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 
192, in reduce_scatter +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 3) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 4) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 11) +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in 
local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 1) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 4) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in 
reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 5) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 12) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 2) +[rank4]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"ing.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 3) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 5) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"sult = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 3) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 4) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper 
+[rank31]: Traceback (most recent call last):
+[rank31]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in <module>
+[rank31]:     pretrain(
+[rank31]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain
+[rank31]:     model, optimizer, opt_param_scheduler = setup_model_and_optimizer(
+[rank31]:                                             ^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer
+[rank31]:     args.iteration, args.num_floating_point_operations_so_far = load_checkpoint(
+[rank31]:                                                                 ^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint
+[rank31]:     state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint(
+[rank31]:                                                       ^^^^^^^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint
+[rank31]:     return _load_global_dist_base_checkpoint(
+[rank31]:            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint
+[rank31]:     state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness)
+[rank31]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load
+[rank31]:     loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir)
+[rank31]:                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load
+[rank31]:     checkpoint.load_state_dict(
+[rank31]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper
+[rank31]:     return arg(*args, **kwargs)
+[rank31]:            ^^^^^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict
+[rank31]:     return _load_state_dict(
+[rank31]:            ^^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict
+[rank31]:     central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step)
+[rank31]:                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank31]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter
+[rank31]:     raise result
+[rank31]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])
+ [... the collected sub-traces from (RANK 0) onward repeat the reduce_scatter -> wrapper -> local_step -> create_local_plan -> _validate_global_shapes chain; interleaved copies from ranks 0, 4, 8, 12, 13, 17, 20, 22 and 28 appear here ...]
^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank29]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: Traceback (most recent call last): (RANK 17) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank29]: raise result +[rank29]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank29]: Traceback (most recent call last): (RANK 0) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and 
expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 6) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 14) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank29]: ret_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 14) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 18) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank20]: Traceback (most recent call last): (RANK 7) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 15) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 19) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in ing.position_embeddings.weight +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 15) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 16) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank15]: Traceback (most recent call last): (RANK 3) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 16) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 8) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", linesult = func(*args, **kwargs) +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 4) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/sult = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 1) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank18]: Traceback (most recent call last): (RANK 1) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: Traceback (most recent call last): (RANK 17) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 2) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 18) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: Traceback (most recent call last): (RANK 5) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, 
**kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py" ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 2) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: 
File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 1) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embeddt_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 14) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan 
+[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embeddsult = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: Traceback (most recent call last): (RANK 19) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 1) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, 
**kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 20) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 2) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 15) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 2) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 16) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank19]: local_data = map_fun() +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 21) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank9]: Traceback (most recent call last): (RANKreduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/, line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank22]: Traceback (most recent call last): (RANK 6) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 20) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 3) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 22) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 4) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 7) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 21) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: ^^^^^^^^^ +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 8) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 23) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_sh 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor 
for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 5) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 9) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 24) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 9) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"ing.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 3) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 10) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 4) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planne ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 10) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank31]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 1) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 17) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 2) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24 3) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 5) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 4) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"ing.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 3) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) 
tensor for key embedding.position_embeddings.weight +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 4) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank19]: Traceback (most recent call last): (RANK 18) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 3) +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 5) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^ +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 4) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank1]: local_data = map_fun() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_apes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 22) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 9) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 5) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 5) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 23) +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 6) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 24) +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: Traceback (most recent call last): (RANK 10) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_ 3) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planne576, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 11) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 7) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24sult = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 4) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 8) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 1) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 12) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line, line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 6) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 5) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: 
Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 13) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/distensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: Traceback (most recent call last): (RANK 2) +[rank21]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_ 3) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 6) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 7) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, 
**kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 3) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 4) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 8) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 7) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line, line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 6) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 5) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: Traceback (most recent call last): (RANK 4) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 8) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/minicond, line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 5) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_ ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for 
loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 6) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 7) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 11) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 1) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", 
line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 8) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: 
Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 2) +[rank12]: Traceback (most recent call last): (RANK 7) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", lineing.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 3) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 12) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: 
^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 8) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", liner.create_local_plan() +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 4) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 13) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 25) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 11) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANKtensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and 
expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 6) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 5) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: raise CheckpointingException(_msg) +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 7) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 26) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank24]: Traceback (most recent call last): (RANK 6) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 12) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 8) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 27) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 13) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank8]: ^^^ 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 7) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) 
+[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dising.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 3) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/minicondtensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 6) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 9) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 10) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank24]: Traceback (most recent call last): (RANK 8) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = 
map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 4) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 7) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 9) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 5) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 8) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 28) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise 
CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 14) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/minicondr.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 29) +[rank31]: Traceback (most recent call last): (RANK 10) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 25) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24, line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes 
+[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 15) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 26) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.distt_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 14) +[rank29]: Traceback (most recent call last): (RANK 6) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 7) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", 
line 87, in wrapper +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 16) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: raise CheckpointingException(_msg) +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 15) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 14) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: 
File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 27) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 6) +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 8) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 16) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/a/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 9) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 15) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) 
+[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 7) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 9) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 
192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 16) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/, line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 8) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 10) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank26]: Traceback (most recent call last): (RANK 10) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 6) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 11) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 9) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 11) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/li_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 30) +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
+[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 7) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank31]: raise CheckpointingException(_msg) +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 10) 
+[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 12) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 8) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", linedistributed/checkpoint/logger.py", line 87, in wrapper 
+[rank22]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank8]: Traceback (most recent call last): (RANK 31) +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank8]: local_data = map_fun() +[rank8]: ^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank8]: result = func(*args, **kwargs) +[rank8]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 13) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for 
loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 11) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/li^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank8]: local_plan = planner.create_local_plan() +[rank8]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank8]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank8]: raise CheckpointingException(_msg) +[rank8]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: Traceback (most recent call last): (RANK 17) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 28) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = 
planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 9) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: Traceback (most recent call last): (RANK 17) +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank0]: Traceback (most recent call last): (RANK 29) +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank0]: local_data = map_fun() +[rank0]: ^^^^^^^^^ +[rank0]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank0]: result = func(*args, **kwargs) +[rank0]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 18) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 10) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data 
= map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank0]: local_plan = planner.create_local_plan() +[rank0]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank0]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank0]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank0]: raise CheckpointingException(_msg) +[rank0]: megatron.core.dist 3) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 11) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 18) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: Traceback (most recent call last): (RANK 11) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torcdistributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 4) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank12]: Traceback (most recent call last): (RANK 12) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 12) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 5) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 13) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 17) +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 13) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_b/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dish.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 19) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 14) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ 
+[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 18) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 12) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 20) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank31]: Traceback (most recent call last): (RANK 15) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan 
+[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 13) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 16) +[rank31]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: Traceback (most recent call last): (RANK 9) +[rank3]: ^^^^^^^^^ +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 21) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatroa/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junt_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 14) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 10) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 
4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 9) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank28]: Traceback (most recent call last): (RANK 17) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = 
planner.create_local_plan()
+[rank4]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank4]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank4]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank4]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank4]:     raise CheckpointingException(_msg)
+[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+
+[Duplicates collapsed: every rank in the job (ranks 0-31 report in this section) raises this same CheckpointingException via the identical call chain reduce_scatter (torch/distributed/checkpoint/utils.py:192) -> wrapper (torch/distributed/checkpoint/logger.py:87) -> local_step (torch/distributed/checkpoint/state_dict_loader.py:223) -> create_local_plan (megatron/core/dist_checkpointing/strategies/torch.py:605) -> _validate_global_shapes (torch.py:576), always for key embedding.position_embeddings.weight with loaded shape (40960, 4096) vs expected (24576, 4096).]
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 20) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 11) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/litensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 6) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 12) +[rank9]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 20) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 21) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 7) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 13) +[rank9]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 21) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: Traceback (most recent call last): (RANK 22) +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatro: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 27) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/jun576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 11) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 8) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 23) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", 
line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 12) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 
4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 9) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 28) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 24) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnse576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 11) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: 
local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 29) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 13) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 10) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1da/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/disdistributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 22) +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: Traceback (most recent call last): (RANK 12) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 11) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 23) +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 17) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 13) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 12) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 24) +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnsen/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 18) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 13) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank9]: Traceback (most recent call last): (RANK 14) +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
+[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 9) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 15) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 9) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 10) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() 
+[rank7]: Traceback (most recent call last): (RANK 14) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24rver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 15) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 16) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpointrver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: Traceback (most recent call last): (RANK 10) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 25) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 25) +[rank12]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 14) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank7]: Traceback (most recent call last): (RANK 16) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpointn/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 26) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 14) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank12]: Traceback (most recent call last): (RANK 26) +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: local_data = map_fun() +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 15) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingExceptionda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 15) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: raise CheckpointingException(_msg) +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException/state_dict_loader.py", line 223, in local_step +[rank9]: 
local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 22) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 16) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/h.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 19) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 23) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 17) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: Traceback (most recent call last): (RANK 16) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpointb/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 24) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnse, line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 18) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank29]: Traceback (most recent call last): (RANK 20) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call 
last): (RANK 6) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 12) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 19) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in [rank14]: Traceback (most recent call last): +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 21) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 7) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank14]: pretrain( +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junh.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 19) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 13) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank14]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank14]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank14]: ^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank14]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: result = func(*args, **kwargs) +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 8) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 27) +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank14]: return _load_global_dist_base_checkpoint( +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank14]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank14]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatroa/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank14]: checkpoint.load_state_dict( +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank14]: return arg(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank14]: return _load_state_dict( +[rank14]: ^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in 
_load_state_dict +[rank14]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank28]: Traceback (most recent call last): (RANK 20) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 28) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 9) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank14]: raise result +[rank14]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank14]: Traceback (most recent call last): (RANK 0) +[rank14]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 21) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank22]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank22]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank22]: raise CheckpointingException(_msg) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank14]: re[rank11]: Traceback (most recent call last): +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/./pretrain_gpt_profile.py", line 554, in +[rank11]: pretrain( +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/jun576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 11) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank22]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: Traceback (most recent call last): (RANK 29) +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank22]: local_data = map_fun() +[rank22]: ^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank22]: result = func(*args, **kwargs) +[rank22]: ^^^^^^^^^^^^^^^^^^^^^ +[rank22]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank22]: local_plan = planner.create_local_plan() +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 10) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 805, in pretrain +[rank11]: model, optimizer, opt_param_scheduler = setup_model_and_optimizer( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/training.py", line 1283, in setup_model_and_optimizer +[rank11]: args.iteration, args.num_floating_point_operations_so_far = load_checkpoint( +[rank11]: ^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 1374, in load_checkpoint +[rank11]: state_dict, checkpoint_name, release, ckpt_type = _load_base_checkpoint( +[rank11]: 
^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank22]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2rver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 974, in _load_base_checkpoint +[rank11]: return _load_global_dist_base_checkpoint( +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/training/checkpointing.py", line 870, in _load_global_dist_base_checkpoint +[rank11]: state_dict = dist_checkpointing.load(sharded_state_dict, checkpoint_name, load_strategy, strict=args.dist_ckpt_strictness) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/serialization.py", line 148, in load +[rank11]: loaded_state_dict = sharded_strategy.load(sharded_state_dict, checkpoint_dir) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: Traceback (most recent call last): (RANK 12) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 11) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/litensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 915, in load +[rank11]: checkpoint.load_state_dict( +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/typing_extensions.py", line 3253, in wrapper +[rank11]: return arg(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 41, in load_state_dict +[rank11]: return _load_state_dict( +[rank11]: ^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 234, in _load_state_dict +[rank11]: central_plan: LoadPlan = distW.reduce_scatter("plan", local_step, global_step) +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 13) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: 
Traceback (most recent call last): (RANK 25) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 6) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 219, in reduce_scatter +[rank11]: raise result +[rank11]: torch.distributed.checkpoint.api.CheckpointException: CheckpointException ranks:dict_keys([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) +[rank11]: Traceback (most recent call last): (RANK 0) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 26) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 7) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: rereduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 14) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 15) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 20) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 8) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/minicondb/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 21) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 16) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/da/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: Traceback (most recent call last): (RANK 9) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 10) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 12) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank1]: local_plan = planner.create_local_plan() +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_sh3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 30) +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 22) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 13) +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 23) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: ^^^^^^^^^ +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank13]: Traceback (most recent call last): (RANK 31) +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank13]: local_data = map_fun() +[rank13]: ^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = 
func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: Traceback (most recent call last): (RANK 11) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatro/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank13]: result = func(*args, **kwargs) +[rank13]: ^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank13]: local_plan = planner.create_local_plan() +[rank13]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank13]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank13]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank13]: raise CheckpointingException(_msg) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 24) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnseda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 12) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 17) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank13]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +sult = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 22) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent 
call last): (RANK 13) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 18) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: Traceback (most recent call last): (RANK 1) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 23) +[rank31]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/disda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 22) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 19) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in /state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 24) 
+[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnset_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 17) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 2) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise 
CheckpointingException(_msg)
+[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+[rank20]: Traceback (most recent call last): (RANK 23)
+[rank20]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank20]:     local_data = map_fun()
+[rank20]:                  ^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank20]:     result = func(*args, **kwargs)
+[rank20]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank20]:     local_plan = planner.create_local_plan()
+[rank20]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank20]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank20]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank20]:     raise CheckpointingException(_msg)
+[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+[... the identical traceback and CheckpointingException repeat verbatim, interleaved across ranks 1, 3, 4, 5, 6, 9, 11, 12, 14, 17, 18, 20, 22, 25, 26, 29, and 31, with per-shard labels (RANK 1) through (RANK 31) ...]
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingExceptiondistributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank12]: ^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank12]: result = func(*args, **kwargs) +[rank12]: ^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: local_plan = planner.create_local_plan() +[rank12]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank12]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank12]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 20) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank12]: raise CheckpointingException(_msg) +[rank31]: Traceback (most recent call last): (RANK 27) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 21) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank12]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) 
and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +ing.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 3) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 28) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: Traceback (most recent call last): (RANK 17) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: local_data = map_fun() +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 4) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 18) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shreduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 29) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 5) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: 
local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3h.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 20) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: Traceback (most recent call last): (RANK 19) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: Traceback (most recent call last): (RANK 27) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = 
planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 21) +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py"r.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 20) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 
4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 28) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank9]: Traceback (most recent call last): (RANK 25) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) 
+[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 26) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 21) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 29) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank3]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_sh/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/jundistributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1t_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: Traceback (most recent call last): (RANK 17) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 27) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 14) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 18) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^, line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 6) +[rank24]: Traceback (most recent call last): (RANK 17) +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 18) +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 15) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 19) +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 7) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 16) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in n/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 14) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 19) +[rank25]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^ +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 8) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 20) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/h.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 19) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 15) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line, line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 6) +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 20) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 21) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 16) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 7) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 22) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 21) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpointb/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 12) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 8) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 23) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/jun9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 30) +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: 
local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 13) +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 9) +[rank14]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: Traceback (most recent call last): (RANK 24) +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: Traceback (most recent call last): (RANK 31) +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank19]: local_data = map_fun() +[rank19]: ^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnseh.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback 
(most recent call last): (RANK 19) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: result = func(*args, **kwargs) +[rank19]: ^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank19]: local_plan = planner.create_local_plan() +[rank19]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank19]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank19]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank19]: raise CheckpointingException(_msg) +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatro/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 10) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
+[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank19]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: Traceback (most recent call last): (RANK 17) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: Traceback (most recent call last): (RANK 20) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 18) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 11) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 21) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: Traceback (most recent call last): (RANK 17) +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 18) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 19) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 12) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junrver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in apes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 22) +[rank7]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 13) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 25) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torcda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key 
embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 23) +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 22) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dis, line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 26) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 6) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 23) +[rank18]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: result = func(*args, **kwargs) +[rank18]: ^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank18]: local_plan = planner.create_local_plan() +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 24) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 7) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingExceptionrver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank18]: Traceback (most recent call last): (RANK 24) +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank18]: local_data = map_fun() +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planneapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 22) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight 
+[rank26]: Traceback (most recent call last): (RANK 25) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank18]: ^^^^^^^^^ +[rank18]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnseh.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: raise CheckpointingException(_msg) +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank17]: Traceback (most recent call last): (RANK 19) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: Traceback (most recent call last): (RANK 23) +[rank3]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 8) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 26) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 20) +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 9) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 24) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: ^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingExceptionda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 21) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = plannereduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 10) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: Traceback (most recent call last): (RANK 22) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 20) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 11) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 23) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: File "/mnt/weka/home/hao.zhang/jun: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 27) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 28) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 21) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 12) +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank20]: local_plan = planner.create_local_plan() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank20]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank20]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 24) +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnse1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank20]: raise CheckpointingException(_msg) +[rank20]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank20]: Traceback (most recent call last): (RANK 29) +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank20]: local_data = map_fun() +[rank20]: ^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank20]: result = func(*args, **kwargs) +[rank20]: ^^^^^^^^^^^^^^^^^^^^^ +[rank20]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_sh/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank15]: Traceback (most recent call last): (RANK 13) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 30) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: 
local_plan = planner.create_local_plan() +[rank20]: local_plan = planner.create_local_plan() +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 17) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank31]: raise CheckpointingException(_msg) +[rank31]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank31]: Traceback (most recent call last): (RANK 31) +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank31]: local_data = map_fun() +[rank20]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank2da/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected 
((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 22) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 18) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 14) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: ^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank31]: result = func(*args, **kwargs) +[rank31]: ^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank31]: local_plan = planner.create_local_plan() +[rank31]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank31]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank31]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank31]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 23) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: 
+[rank17]: Traceback (most recent call last): (RANK 25)
+[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank17]: local_data = map_fun()
+[rank17]: ^^^^^^^^^
+[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank17]: result = func(*args, **kwargs)
+[rank17]: ^^^^^^^^^^^^^^^^^^^^^
+[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank17]: local_plan = planner.create_local_plan()
+[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank17]: raise CheckpointingException(_msg)
+[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+[... further identical copies of this traceback from the remaining ranks omitted ...]
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 26) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 26) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 17) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingExceptiont_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 21) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 18) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingExceptionrver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shr.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank15]: ^^^^^^^^^ +[rank15]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 25) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 14) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 25) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in 
create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc^^^^^^ +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 26) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 26) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank21]: Traceback (most recent call last): (RANK 15) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: ^^^^^^^^^ +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 28) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 16) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 29) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for 
4096)) tensor for key embedding.position_embeddings.weight +[rank4]: Traceback (most recent call last): (RANK 24) +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: ^^^^^^^^^ +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank18]: local_plan = planner.create_local_plan() +[rank18]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1sult = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank4]: local_data = map_fun() +[rank4]: ^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank4]: result = func(*args, **kwargs) +[rank4]: ^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank4]: local_plan = plannereduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 28) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 1) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 20) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24h.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): 
(RANK 19) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 29) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 2) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: local_plan = 
planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 20) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: ^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: 
self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank16]: raise CheckpointingException(_msg) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 21) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 30) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedd: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 27) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_sh/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 21) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank26]: raise CheckpointingException(_msg) +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank26]: Traceback (most recent call last): (RANK 31) +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank26]: local_data = map_fun() +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected 
((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 17) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/jun_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 30) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: ^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank26]: result = func(*args, **kwargs) +[rank26]: ^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank26]: local_plan = planner.create_local_plan() +[rank26]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank26]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: Traceback (most recent call last): (RANK 28) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step 
+[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: Traceback (most recent call last): (RANK 31) +[rank26]: raise CheckpointingException(_msg) +[rank17]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank17]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank17]: raise CheckpointingException(_msg) +[rank17]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank17]: Traceback (most recent call last): (RANK 29) +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank17]: local_data = map_fun() +[rank17]: ^^^^^^^^^ +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 18) +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank9]: local_data = map_fun() +[rank9]: ^^^^^^^^^ 
+[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank9]: result = func(*args, **kwargs) +[rank9]: ^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank9]: local_plan = planner.create_local_plan() +[rank9]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank26]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 30) +[rank17]: result = func(*args, **kwargs) +[rank17]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank9]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
605, in create_local_plan +[rank17]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank17]: local_plan = planner.create_local_plan() +[rank17]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 19) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in apes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank9]: raise CheckpointingException(_msg) +[rank9]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +h.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 19) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank24]: Traceback (most recent call last): (RANK 31) +[rank24]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank24]: local_data = map_fun() +[rank24]: ^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 17) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: Traceback (most recent call last): (RANK 22) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank24]: result = func(*args, **kwargs) +[rank24]: ^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank24]: local_plan = planner.create_local_plan() +[rank24]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank24]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank24]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank24]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank24]: raise CheckpointingException(_msg) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 23) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 20) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank24]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + 
+[rank21]: Traceback (most recent call last): (RANK 18) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 21) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank28]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank28]: Traceback (most recent call last): (RANK 30) +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank28]: local_data = map_fun() +[rank28]: ^^^^^^^^^ +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 24) +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank28]: result = func(*args, **kwargs) +[rank28]: ^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank28]: local_plan = planner.create_local_plan() +[rank28]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank28]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank28]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank28]: raise CheckpointingException(_msg) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc8]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank18]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank18]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank18]: raise CheckpointingException(_msg) +[rank18]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planne^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, 
**kwargs)
+[... identical CheckpointingException tracebacks from ranks 3, 4, 5, 6, 7, 11, 14, 15, 16, 17, 18, 21, 25, 27, and 28 (covering gathered RANK indices 3-31) were repeated and interleaved here; collapsed to one representative traceback ...]
+[rank7]: Traceback (most recent call last): (RANK 28)
+[rank7]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank7]:     local_data = map_fun()
+[rank7]:                  ^^^^^^^^^
+[rank7]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank7]:     result = func(*args, **kwargs)
+[rank7]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank7]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank7]:     local_plan = planner.create_local_plan()
+[rank7]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank7]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank7]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank7]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank7]:     raise CheckpointingException(_msg)
+[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
CheckpointingException(_msg) +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 4) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 21) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingExceptionrver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 27) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shr.create_local_plan() +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 25) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 28) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 5) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 25) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: ^^^^^^^^^ +[rank25]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank14]: Traceback (most recent call last): (RANK 26) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank14]: local_data = map_fun() +[rank14]: ^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank14]: result = func(*args, **kwargs) +[rank14]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 29) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ 
+[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 6) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: raise CheckpointingException(_msg) +[rank14]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 30) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded 
(torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 26) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank14]: local_plan = planner.create_local_plan() +[rank14]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank14]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank14]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank14]: raise CheckpointingException(_msg) +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingExceptiondistributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 7) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in 
local_step +[rank16]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 27) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 17) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank25]: Traceback (most recent call last): (RANK 31) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank25]: local_data = map_fun() +[rank25]: ^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank25]: result = func(*args, **kwargs) +[rank25]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 8) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank1]: ^^^_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 30) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank7]: ^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank25]: local_plan = planner.create_local_plan() +[rank25]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank25]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank25]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank25]: raise CheckpointingException(_msg) +[rank25]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +[rank16]: ^^^^^^^^^ +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: Traceback (most recent call last): (RANK 31) +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank7]: local_data = map_fun() +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 18) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +9]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 27) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 30) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter 
+[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank7]: result = func(*args, **kwargs) +[rank7]: ^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank7]: local_plan = planner.create_local_plan() +[rank7]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank7]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank7]: raise CheckpointingException(_msg) +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torc: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 27) +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, 
self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank21]: Traceback (most recent call last): (RANK 28) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank7]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 30) +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank29]: Traceback (most recent call last): (RANK 31) +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank29]: local_data = map_fun() +[rank29]: ^^^^^^^^^ 
+[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: result = func(*args, **kwargs) +[rank29]: ^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank29]: local_plan = planner.create_local_plan() +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank21]: Traceback (most recent call last): (RANK 29) +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank21]: local_data = map_fun() +[rank21]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank3]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank3]: raise CheckpointingException(_msg) +[rank3]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank3]: Traceback (most recent call last): (RANK 31) +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank15]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank15]: Traceback (most recent call last): (RANK 28) +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank15]: local_data = map_fun() +[rank15]: ^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank29]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank29]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank29]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank29]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank29]: raise CheckpointingException(_msg) +[rank29]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 11) +[rank21]: result = func(*args, **kwargs) +[rank21]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank3]: local_data = map_fun() +[rank3]: ^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank3]: result = func(*args, **kwargs) +[rank3]: ^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank3]: local_plan = planner.create_local_plan() +[rank3]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank3]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: result = func(*args, **kwargs) +[rank15]: ^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank15]: local_plan = planner.create_local_plan() +[rank15]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank15]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank15]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank15]: raise CheckpointingException(_msg) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank21]: local_plan = planner.create_local_plan() +[rank21]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank21]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank21]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank21]: raise CheckpointingException(_msg) +[rank21]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+[rank27]: Traceback (most recent call last): (RANK 12)
+[rank27]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank27]:     local_data = map_fun()
+[rank27]:                  ^^^^^^^^^
+[rank27]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank27]:     result = func(*args, **kwargs)
+[rank27]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank27]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank27]:     local_plan = planner.create_local_plan()
+[rank27]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank27]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank27]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank27]:     raise CheckpointingException(_msg)
+[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+
+[the identical traceback and CheckpointingException repeat verbatim for every collected global rank, (RANK 9) through (RANK 31), reprinted under the local log prefixes [rank1], [rank3], [rank4], [rank5], [rank6], [rank11], [rank14], [rank15], [rank16], [rank21], and [rank27]]
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank14]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +da/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 22) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 25) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank4]: local_plan = planner.create_local_plan() +[rank4]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank4]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank4]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank4]: raise CheckpointingException(_msg) +[rank4]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 
4096)) tensor for key embedding.position_embeddings.weight + +^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 26) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 20) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: 
Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 23) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 21) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 28) +[rank11]: result = func(*args, 
**kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 27) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 24) +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 28) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank5]: Traceback (most recent call last): (RANK 29) +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank5]: local_data = map_fun() +[rank5]: ^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: 
result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 22) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank5]: result = func(*args, **kwargs) +[rank5]: ^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank5]: local_plan = planner.create_local_plan() +[rank5]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank5]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank5]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank5]: raise CheckpointingException(_msg) +[rank5]: megatron.core.distr.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 25) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: File 
"/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 29) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 23) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in 
_validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 25) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 26) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank27]: Traceback (most recent call last): (RANK 30) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: raise CheckpointingException(_msg) +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank27]: Traceback (most recent call last): (RANK 31) +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank27]: local_data = map_fun() +[rank27]: ^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 24) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File 
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 26) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank6]: ^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank6]: result = func(*args, **kwargs) +[rank6]: ^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank6]: local_plan = planner.create_local_plan() +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 27) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank27]: result = func(*args, **kwargs) +[rank27]: ^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank27]: local_plan = planner.create_local_plan() +[rank27]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank27]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank27]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank27]: raise CheckpointingException(_msg) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global 
shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 25) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank6]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank6]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank6]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: raise CheckpointingException(_msg) +[rank6]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank6]: Traceback (most recent call last): (RANK 27) +[rank6]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank6]: local_data = map_fun() +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank27]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight + +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank6]: ^^^_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: 
megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 28) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank16]: raise CheckpointingException(_msg) +[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank16]: Traceback (most recent call last): (RANK 26) +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank16]: local_data = map_fun() +[rank16]: ^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank16]: result = func(*args, **kwargs) +[rank16]: ^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: Traceback (most recent call last): (RANK 30) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank11]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 
4096)) tensor for key embedding.position_embeddings.weight +[rank11]: Traceback (most recent call last): (RANK 29) +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank11]: local_data = map_fun() +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank1]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight +[rank1]: Traceback (most recent call last): (RANK 31) +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter +[rank1]: local_data = map_fun() +[rank1]: ^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank11]: ^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper +[rank1]: result = func(*args, **kwargs) +[rank1]: ^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank1]: local_plan = planner.create_local_plan() +[rank1]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank1]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank1]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank1]: raise CheckpointingException(_msg) +[rank11]: result = func(*args, **kwargs) +[rank11]: ^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step +[rank11]: local_plan = planner.create_local_plan() +[rank11]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank11]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank11]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes +[rank11]: raise CheckpointingException(_msg) +[rank16]: local_plan = planner.create_local_plan() +[rank16]: ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan +[rank16]: self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors) +[rank16]: File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 
576, in _validate_global_shapes
+[rank16]:     raise CheckpointingException(_msg)
+[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
+[log consolidated: ranks 1, 5, 6, 11, and 16 each reported the identical exception again for sub-ranks (RANK 27) through (RANK 31); their interleaved copies are collapsed into one representative traceback below]
+[rank16]: Traceback (most recent call last): (RANK 31)
+[rank16]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/utils.py", line 192, in reduce_scatter
+[rank16]:     local_data = map_fun()
+[rank16]:                  ^^^^^^^^^
+[rank16]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/logger.py", line 87, in wrapper
+[rank16]:     result = func(*args, **kwargs)
+[rank16]:              ^^^^^^^^^^^^^^^^^^^^^
+[rank16]:   File "/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/checkpoint/state_dict_loader.py", line 223, in local_step
+[rank16]:     local_plan = planner.create_local_plan()
+[rank16]:                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+[rank16]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 605, in create_local_plan
+[rank16]:     self._validate_global_shapes(self.metadata, self.shapes_validation_sharded_tensors)
+[rank16]:   File "/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/dist_checkpointing/strategies/torch.py", line 576, in _validate_global_shapes
+[rank16]:     raise CheckpointingException(_msg)
+[rank16]: megatron.core.dist_checkpointing.core.CheckpointingException: Global shape mismatch for loaded (torch.Size([40960, 4096])) and expected ((24576, 4096)) tensor for key embedding.position_embeddings.weight
embedding.position_embeddings.weight
+
+[log consolidated: all 32 ranks (rank0-rank31, across fs-mbz-gpu-852, fs-mbz-gpu-870, fs-mbz-gpu-881, and fs-mbz-gpu-901) then emitted the same NCCL shutdown warning between 21:30:42.159 and 21:30:42.843; one representative copy follows]
+[rank2]:[W621 21:30:42.172563952 ProcessGroupNCCL.cpp:1476] Warning: WARNING: destroy_process_group() was not called before program exit, which can leak resources. For more info, please see https://pytorch.org/docs/stable/distributed.html#shutdown (function operator())
+[log consolidated: each of the four elastic agents (pids 169552, 2088172, 3389499, 3458403) then sent SIGTERM to its still-running workers (pids 169684-169690 except 169687; 2088301-2088308 except 2088302; 3389631-3389637; 3458608-3458615 except 3458610/3458611); one representative copy follows]
+W0621 21:30:43.048000 169552 site-packages/torch/distributed/elastic/multiprocessing/api.py:900] Sending process 169684 closing signal SIGTERM
+E0621 21:30:43.666000 169552 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 3 (pid: 169687) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
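+
+[editor's note] The warning block above is PyTorch asking for an explicit teardown of the default process group before exit. A minimal sketch of the pattern it wants (generic init_process_group usage, not the author's training script):
+
+    import torch.distributed as dist
+
+    def main() -> None:
+        # Assumes the usual env:// initialization provided by the launcher.
+        dist.init_process_group(backend="nccl")
+        try:
+            ...  # training / checkpoint loading would go here
+        finally:
+            # Skipping this teardown is what triggers the
+            # "destroy_process_group() was not called" warning above.
+            if dist.is_initialized():
+                dist.destroy_process_group()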
"/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launcher/api.py", line 270, in launch_agent + raise ChildFailedError( +torch.distributed.elastic.multiprocessing.errors.ChildFailedError: +============================================================ +./pretrain_gpt_profile.py FAILED +------------------------------------------------------------ +Failures: +[1]: + time : 2025-06-21_21:30:43 + host : fs-mbz-gpu-852 + rank : 7 (local_rank: 7) + exitcode : 1 (pid: 169691) + error_file: + traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html +------------------------------------------------------------ +Root Cause (first observed failure): +[0]: + time : 2025-06-21_21:30:43 + host : fs-mbz-gpu-852 + rank : 3 (local_rank: 3) + exitcode : 1 (pid: 169687) + error_file: + traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html +============================================================ ++ set +x +[W621 21:30:44.931584703 TCPStore.cpp:115] [c10d] recvVector failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:35550, remote=[fs-mbz-gpu-852]:29500): failed to recv, got 0 bytes +Exception raised from recvBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:678 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x1460259785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14600e85aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa0d0 (0x14600e85c0d0 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5baa81d (0x14600e85c81d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: + 0x5bab4a9 (0x14600e85d4a9 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::compareSet(std::__cxx11::basic_string, std::allocator > const&, std::vector > const&, std::vector > const&) + 0x1fb (0x14600e8574cb in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: + 0xc0f919 (0x14601db8b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #7: + 0x37f17d (0x14601d2fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #16: + 0x94ac3 (0x1460269f2ac3 in /lib/x86_64-linux-gnu/libc.so.6) +frame #17: + 0x126850 (0x146026a84850 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:30:44.020000 3458403 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1341] The node 'fs-mbz-gpu-870_3458403_0' has failed to send a keep-alive heartbeat to the rendezvous '343213' due to an error of type RendezvousConnectionError. 
+E0621 21:30:44.116000 3458403 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 2 (pid: 3458610) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3 +[W621 21:30:44.034067663 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:35550, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x1460259785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14600e85aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14600e85c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x14600e85db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::compareSet(std::__cxx11::basic_string, std::allocator > const&, std::vector > const&, std::vector > const&) + 0x299 (0x14600e857569 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: + 0xc0f919 (0x14601db8b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #6: + 0x37f17d (0x14601d2fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #24: + 0x29d90 (0x146026987d90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #25: __libc_start_main + 0x80 (0x146026987e40 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:30:44.127000 3458403 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3458403_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError. 
+[W621 21:30:44.046296855 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:35550, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x1460259785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14600e85aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14600e85c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +[W621 21:30:44.501207018 TCPStore.cpp:115] [c10d] recvVector failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-881]:56612, remote=[fs-mbz-gpu-852]:29500): failed to recv, got 0 bytes +Exception raised from recvBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:678 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x14e4ac1785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14e49545aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa0d0 (0x14e49545c0d0 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x14600e85db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::compareSet(std::__cxx11::basic_string, std::allocator > const&, std::vector > const&, std::vector > const&) + 0x299 (0x14600e857569 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: + 0xc0f919 (0x14601db8b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #6: + 0x37f17d (0x14601d2fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #3: + 0x5baa81d (0x14e49545c81d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: + 0x5bab4a9 (0x14e49545d4a9 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: c10d::TCPStore::compareSet(std::__cxx11::basic_string, std::allocator > const&, std::vector > const&, std::vector > const&) + 0x1fb (0x14e4954574cb in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #6: + 0xc0f919 (0x14e4a478b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #24: + 0x29d90 (0x146026987d90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #25: __libc_start_main + 0x80 (0x146026987e40 in /lib/x86_64-linux-gnu/libc.so.6) + +E0621 21:30:44.137000 2088172 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 1 (pid: 2088302) of binary: 
/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3 +frame #7: + 0x37f17d (0x14e4a3efb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #16: + 0x94ac3 (0x14e4ad4c5ac3 in /lib/x86_64-linux-gnu/libc.so.6) +frame #17: + 0x126850 (0x14e4ad557850 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:30:44.137000 3458403 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3458403_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError. +W0621 21:30:44.136000 3389499 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1341] The node 'fs-mbz-gpu-881_3389499_0' has failed to send a keep-alive heartbeat to the rendezvous '343213' due to an error of type RendezvousConnectionError. +[W621 21:30:44.055360943 TCPStore.cpp:106] [c10d] sendBytes failed on SocketImpl(fd=3, addr=[fs-mbz-gpu-870]:35550, remote=[fs-mbz-gpu-852]:29500): Broken pipe +Exception raised from sendBytes at /pytorch/torch/csrc/distributed/c10d/Utils.hpp:653 (most recent call first): +frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string, std::allocator >) + 0x98 (0x1460259785e8 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libc10.so) +frame #1: + 0x5ba8afe (0x14600e85aafe in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #2: + 0x5baa358 (0x14600e85c358 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #3: + 0x5babb3e (0x14600e85db3e in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #4: c10d::TCPStore::compareSet(std::__cxx11::basic_string, std::allocator > const&, std::vector > const&, std::vector > const&) + 0x299 (0x14600e857569 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_cpu.so) +frame #5: + 0xc0f919 (0x14601db8b919 in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) +frame #6: + 0x37f17d (0x14601d2fb17d in /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/lib/libtorch_python.so) + +frame #24: + 0x29d90 (0x146026987d90 in /lib/x86_64-linux-gnu/libc.so.6) +frame #25: __libc_start_main + 0x80 (0x146026987e40 in /lib/x86_64-linux-gnu/libc.so.6) + +W0621 21:30:44.146000 3458403 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-870_3458403_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError. 
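+
+[editor's note] The TCPStore errors above are secondary failures: every agent keeps a client connection to the c10d store hosted by the first node (fs-mbz-gpu-852:29500), so once that node's launcher exits, the survivors' heartbeat and shutdown messages hit a dead socket. A small sketch of that client/server relationship (hypothetical standalone usage, not the launcher's internals):
+
+    from datetime import timedelta
+    from torch.distributed import TCPStore
+
+    # The rank-0 launcher is the server (is_master=True); agents on the
+    # other nodes are clients of the same host:port. Once the server
+    # process dies, any later client call fails like the "Broken pipe" /
+    # "failed to recv, got 0 bytes" errors logged above.
+    store = TCPStore(
+        host_name="fs-mbz-gpu-852",
+        port=29500,
+        is_master=False,  # client side, as on fs-mbz-gpu-870/881/901
+        timeout=timedelta(seconds=30),
+    )
+    store.set("status", "alive")  # raises once the server is gone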
+[log consolidated: the agents on fs-mbz-gpu-870, fs-mbz-gpu-881, and fs-mbz-gpu-901 then raised the same ChildFailedError through the identical launch.py -> run.py -> launcher/api.py call chain shown above; their interleaved tracebacks, plus further recvVector/sendBytes frame dumps (including ones from [fs-mbz-gpu-901]:36028 to [fs-mbz-gpu-852]:29500), are elided here]
+W0621 21:30:44.156000 2088172 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2088172_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
+E0621 21:30:44.164000 3389499 site-packages/torch/distributed/elastic/multiprocessing/api.py:874] failed (exitcode: 1) local_rank: 7 (pid: 3389638) of binary: /mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/bin/python3
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+[1]:
+  time      : 2025-06-21_21:30:43
+  host      : fs-mbz-gpu-870
+  rank      : 11 (local_rank: 3)
+  exitcode  : 1 (pid: 3458611)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:30:43
+  host      : fs-mbz-gpu-870
+  rank      : 10 (local_rank: 2)
+  exitcode  : 1 (pid: 3458610)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+W0621 21:30:44.169000 2088172 site-packages/torch/distributed/elastic/rendezvous/dynamic_rendezvous.py:1292] The node 'fs-mbz-gpu-901_2088172_0' has failed to shutdown the rendezvous '343213' due to an error of type RendezvousConnectionError.
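+
+[editor's note] In these summaries, rank is the global rank and local_rank the worker index on its node; with --nnodes 4 --nproc_per_node 8 the relation is global = node_rank * 8 + local_rank. A quick sanity check against the reports above and below (illustrative Python):
+
+    NPROC_PER_NODE = 8  # from --nproc_per_node 8
+
+    def global_rank(node_rank: int, local_rank: int) -> int:
+        return node_rank * NPROC_PER_NODE + local_rank
+
+    assert global_rank(1, 3) == 11  # fs-mbz-gpu-870, node_rank 1
+    assert global_rank(1, 2) == 10  # fs-mbz-gpu-870, node_rank 1
+    assert global_rank(2, 7) == 23  # fs-mbz-gpu-881, node_rank 2
+    assert global_rank(3, 1) == 25  # fs-mbz-gpu-901, node_rank 3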
+[log consolidated: further interleaved sendBytes/recvVector frame dumps from fs-mbz-gpu-881 and fs-mbz-gpu-901 (identical in form to the dump above) and repeated rendezvous-shutdown warnings from agents 2088172 and 3389499 at 21:30:44.175-21:30:44.194 are elided here; the remaining two failure summaries follow]
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:30:43
+  host      : fs-mbz-gpu-901
+  rank      : 25 (local_rank: 1)
+  exitcode  : 1 (pid: 2088302)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
+torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
+============================================================
+./pretrain_gpt_profile.py FAILED
+------------------------------------------------------------
+Failures:
+  <NO_OTHER_FAILURES>
+------------------------------------------------------------
+Root Cause (first observed failure):
+[0]:
+  time      : 2025-06-21_21:30:43
+  host      : fs-mbz-gpu-881
+  rank      : 23 (local_rank: 7)
+  exitcode  : 1 (pid: 3389638)
+  error_file: <N/A>
+  traceback : To enable traceback see: https://pytorch.org/docs/stable/elastic/errors.html
+============================================================
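+
+[editor's note] Every summary shows error_file: <N/A>, so only "exitcode: 1" survives and the real exception has to be fished out of the interleaved rank logs above. The linked elastic/errors.html page recommends decorating the worker entry point with @record so each worker writes its traceback to an error file the agent can report; a minimal sketch (generic entry point, not the author's pretrain_gpt_profile.py):
+
+    from torch.distributed.elastic.multiprocessing.errors import record
+
+    @record
+    def main() -> None:
+        ...  # worker body; exceptions raised here get written to an error file
+
+    if __name__ == "__main__":
+        main()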
++ set +x
++ set +x
++ set +x
++ for ctx_length in 1024 2048 4096 8192 12288 16384 24576 32768 40960 49152 65536 81920 98304 131072
++ export PROF_CTX_LENGTH=32768
++ PROF_CTX_LENGTH=32768
++ name='/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L32768*tp4.cp8.bs1.json'
++ '[' -f '/mnt/sharefs/users/hao.zhang/junda/megatron-prof-data--unstable-v5/mytrace.L32768*tp4.cp8.bs1.json' ']'
++ echo 'Running ctx_length=32768, TP_SIZE=4, CP_SIZE=8, BATCH_SIZE=1'
++ srun bash ./attnserver.sh
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 3 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 32768 --max-position-embeddings 32768 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 0 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 32768 --max-position-embeddings 32768 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 2 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 32768 --max-position-embeddings 32768 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
++ which python3
++ python3 -m torch.distributed.launch --nproc_per_node 8 --nnodes 4 --node_rank 1 --rdzv_id 343213 --rdzv_backend c10d --rdzv_endpoint fs-mbz-gpu-852:29500 ./pretrain_gpt_profile.py --tensor-model-parallel-size 4 --context-parallel-size 8 --num-layers 2 --hidden-size 4096 --num-attention-heads 64 --group-query-attention --num-query-groups 16 --seq-length 32768 --max-position-embeddings 32768 --micro-batch-size 1 --global-batch-size 1 --lr 0.0005 --train-iters 10 --lr-decay-iters 150000 --lr-decay-style cosine --lr-warmup-iters 2 --weight-decay .1 --adam-beta2 .999 --fp16 --log-interval 1 --save-interval 16 --eval-interval 16 --eval-iters 1 --vocab-file vocab.json --merge-file merges.txt --save gpt-checkpoint --load gpt-checkpoint --logging-level 0 --mock-data --tensorboard-dir tensorboard-logs/
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/torch/distributed/launch.py:207: FutureWarning: The module torch.distributed.launch is deprecated
+and will be removed in future. Use torchrun.
+Note that --use-env is set by default in torchrun.
+If your script expects `--local-rank` argument to be set, please
+change it to read from `os.environ['LOCAL_RANK']` instead. See
+https://pytorch.org/docs/stable/distributed.html#launch-utility for
+further instructions
+  main()
+W0621 21:30:48.094000 171510 site-packages/torch/distributed/run.py:766]
+W0621 21:30:48.094000 171510 site-packages/torch/distributed/run.py:766] *****************************************
+W0621 21:30:48.094000 171510 site-packages/torch/distributed/run.py:766] Setting the OMP_NUM_THREADS environment variable for each process to 1 by default to avoid overloading your system; please tune the variable further for optimal performance in your application as needed.
+W0621 21:30:48.094000 171510 site-packages/torch/distributed/run.py:766] *****************************************
+[The launchers on the other three nodes (PIDs 2090074, 3391388, 3460364) printed the same FutureWarning and OMP_NUM_THREADS notice; the duplicates are omitted.]
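As the deprecation notice says, torchrun (and torch.distributed.launch with --use-env semantics) passes the local rank through the environment rather than as a --local-rank argument. A minimal sketch of the reading pattern the warning asks for:

    # Read the local rank from the environment (set by the launcher)
    # instead of parsing a --local-rank command-line argument.
    import os
    import torch

    local_rank = int(os.environ["LOCAL_RANK"])
    torch.cuda.set_device(local_rank)  # bind this process to its GPU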
+[rank3]:[W621 21:31:10.663190672 ProcessGroupNCCL.cpp:4715] [PG ID 0 PG GUID 0 Rank 3] using GPU 3 as device used by this process is currently unknown. This can potentially cause a hang if this rank-to-GPU mapping is incorrect. You can specify device_id in init_process_group() to force use of a particular device.
+[All 32 ranks printed the same warning, each defaulting to GPU (rank mod 8) on its node; the remaining 31 copies are omitted.]
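The warning's suggested fix is to tell NCCL the rank-to-GPU mapping explicitly when the process group is created. A sketch, assuming the device_id keyword of init_process_group available in recent PyTorch releases:

    # Pin each rank to its GPU at process-group creation so
    # ProcessGroupNCCL does not have to guess the mapping.
    import os
    import torch
    import torch.distributed as dist

    local_rank = int(os.environ["LOCAL_RANK"])
    torch.cuda.set_device(local_rank)
    dist.init_process_group(
        backend="nccl",
        device_id=torch.device("cuda", local_rank),
    )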
+/mnt/weka/home/hao.zhang/junda/attnserver-megatron/megatron/core/models/gpt/gpt_layer_specs.py:94: UserWarning: The fp8 argument in "get_gpt_layer_with_transformer_engine_spec" has been deprecated and will be removed soon. Please update your code accordingly.
+  warnings.warn(
+[This UserWarning was printed once per rank; the remaining copies are omitted.]
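Silencing this warning only requires dropping the deprecated fp8 argument at the call site; FP8 behavior is configured through the training arguments instead. A hedged sketch of such a call (the keyword set shown is illustrative, not the profiling script's actual call):

    # Build the GPT layer spec without the deprecated fp8 argument.
    from megatron.core.models.gpt.gpt_layer_specs import (
        get_gpt_layer_with_transformer_engine_spec,
    )

    layer_spec = get_gpt_layer_with_transformer_engine_spec(
        num_experts=None,        # dense model, as in the run above
        moe_grouped_gemm=False,
        qk_layernorm=False,
    )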
+/mnt/weka/home/hao.zhang/conda/miniconda/envs/junda-attnserver/lib/python3.12/site-packages/transformer_engine/pytorch/cpu_offload.py:595: DeprecationWarning: Offloading weights is deprecated. Using offload_weights=True does not have any effect.
+  warnings.warn(
+[This DeprecationWarning was likewise printed once per rank; the remaining copies are omitted.]
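The DeprecationWarning comes from Transformer Engine's CPU-offload hook: offload_weights is now a no-op, so only activation offloading has any effect. A sketch of requesting offload without the dead flag, assuming get_cpu_offload_context keeps its documented interface (the layer counts are illustrative, loosely matching --num-layers 2 above):

    # Activation-only CPU offload; the deprecated offload_weights flag
    # is simply omitted since it no longer does anything.
    from transformer_engine.pytorch import get_cpu_offload_context

    offload_ctx, sync_fn = get_cpu_offload_context(
        enabled=True,
        num_layers=1,            # layers whose activations are offloaded
        model_layers=2,          # total transformer layers in the model
        offload_activations=True,
    )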