import os
import shutil
import argparse
from tqdm import tqdm
from itertools import product
import numpy as np
import torch
from merge import merge_dst_data


class PtCkpt(object):
    """One parallel layout of a Megatron-style torch checkpoint.

    Describes how a model's transformer layers are sharded across tensor
    (tp), pipeline (pp), expert (ep) and virtual-pipeline (vpp) parallel
    ranks, and implements ``run``, which loads every rank file of a
    *source* ``PtCkpt`` layout and re-saves the weights under *this*
    layout.  Only keeping or shrinking tp/ep is supported (see
    ``get_rank_maps``).
    """

    def __init__(self, ckpt_path, iteration, layers, experts, 
                 tp, pp, ep=None, vpp=None, num_layer_list=None):
        """Record layout parameters and precompute layer<->pp-rank maps.

        ckpt_path: checkpoint root directory (iter_XXXXXXX dirs live inside).
        iteration: training iteration of the checkpoint.
        layers: total number of transformer layers.
        experts: number of MoE experts (stored; not otherwise used here).
        tp / pp / ep: tensor / pipeline / expert parallel sizes.
        vpp: virtual-pipeline size, or None for plain pipeline parallelism.
        num_layer_list: per-pp-stage layer counts (must sum to ``layers``);
            None means an even ``layers // pp`` split.
        """
        self.ckpt_path = ckpt_path
        self.iteration = iteration
        self.iter_path = self.get_iter_path(ckpt_path, iteration)
        self.layers = layers
        self.experts = experts
        self.tp = tp
        self.pp = pp
        self.ep = ep
        # ep of 0 or 1 means no expert parallelism; normalize to None so
        # rank lists and file names skip the ep suffix entirely.
        if self.ep in [0, 1]:
            self.ep = None
        self.vpp = vpp
        self.tp_ranks = list(range(self.tp))
        self.ep_ranks = [None] if self.ep is None else list(range(self.ep))
        # pp == 1 collapses to a single unnumbered stage (rank None).
        self.pp_ranks = [None] if self.pp == 1 else list(range(self.pp))
        self.num_layer_list = num_layer_list
        if self.num_layer_list is None:
            # Even split: every pp stage gets layers // pp layers.
            assert self.layers % self.pp == 0
            self.num_layer_list = [self.layers // self.pp] * self.pp
        assert sum(self.num_layer_list) == self.layers
        self.calc_pprank_layeridxs()
        self.calc_layeridx_pprank()
        
    @staticmethod
    def check_mkdir(dir_path):
        """Create *dir_path* (including parents) if it does not exist."""
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

    def get_iter_path(self, ckpt_path, iteration):
        """Return (and create) the ``iter_{iteration:07d}`` directory."""
        directory = 'iter_{:07d}'.format(iteration)
        directory = os.path.join(ckpt_path, directory)
        self.check_mkdir(directory)
        return directory
        
    def calc_pprank_layeridxs(self) -> None:
        """Fill ``self.pprank_layeridxs``: pp_rank -> global layer ids.

        Without vpp, each stage owns one contiguous slice sized by
        ``num_layer_list``.  With vpp, layers are dealt round-robin to the
        pp ranks in chunks of ``vpp`` layers until each rank reaches its
        ``num_layer_list`` quota (interleaved virtual-pipeline schedule).
        """
        if self.vpp is None:
            num_layer_list_ = list(np.cumsum([0] + self.num_layer_list))
            self.pprank_layeridxs = {}
            for pp_rank in range(self.pp):
                self.pprank_layeridxs[pp_rank] = list(range(num_layer_list_[pp_rank], num_layer_list_[pp_rank+1]))
        else:
            assert self.vpp > 0
            self.pprank_layeridxs = {pp_rank: [] for pp_rank in range(self.pp)}
            pp_rank = 0
            layers_used = 0  # layers assigned within the current vpp chunk
            layer = 0
            while layer < self.layers:
                if len(self.pprank_layeridxs[pp_rank]) < self.num_layer_list[pp_rank]:
                    self.pprank_layeridxs[pp_rank].append(layer)
                    layer += 1
                    layers_used += 1
                else:
                    # This rank is already full; skip it without consuming
                    # a layer.
                    pp_rank += 1
                    if pp_rank >= self.pp:
                        pp_rank = 0
                if layers_used >= self.vpp:
                    # Finished a chunk of vpp layers: move to the next rank.
                    layers_used = 0
                    pp_rank += 1
                    if pp_rank >= self.pp:
                        pp_rank = 0
                        
    def calc_layeridx_pprank(self):
        """Fill ``self.layeridx_pprank``, the inverse of pprank_layeridxs.

        Maps global layer id -> (pp_rank, local_idx) without vpp, or
        (pp_rank, vpp_stage_rank, idx_within_stage) with vpp.
        """
        self.layeridx_pprank = {}
        for pp_rank, layeridxs in self.pprank_layeridxs.items():
            for idx_in_pp, layer in enumerate(layeridxs):
                if self.vpp is None:
                    # Local index of the layer within its pp stage.
                    self.layeridx_pprank[layer] = (pp_rank, idx_in_pp)
                else:
                    # The position inside the rank's list splits into a vpp
                    # stage number and an index within that stage.
                    vpp_stage_rank = idx_in_pp // self.vpp
                    vpp_stage_layer_idx = idx_in_pp % self.vpp
                    self.layeridx_pprank[layer] = (pp_rank, vpp_stage_rank, vpp_stage_layer_idx)

    def get_pt_path_by_tpppep_rank(self, tp_rank, pp_rank=None, ep_rank=None):
        """Return the model_optim_rng.pt path for one (tp, pp, ep) rank.

        Directory names follow the Megatron convention
        ``mp_rank_{tp:02d}[_{pp:03d}][_{ep:03d}]``.  NOTE: this also
        *creates* the directory, so calling it on a source checkpoint has
        a (harmless) mkdir side effect.
        """
        mp_rank_path = self.iter_path
        if pp_rank is None or self.pp == 1:
            mp_rank_path = os.path.join(mp_rank_path, f'mp_rank_{tp_rank:02d}')
        else:
            mp_rank_path = os.path.join(mp_rank_path, f'mp_rank_{tp_rank:02d}_{pp_rank:03d}')
        if ep_rank is not None and self.ep is not None:
            mp_rank_path = mp_rank_path + f'_{ep_rank:03d}'
        self.check_mkdir(mp_rank_path)
        return os.path.join(mp_rank_path, 'model_optim_rng.pt')
    
    @staticmethod
    def get_rank_maps(rank, dst_rank_n, src_rank_n, src_ckpt) -> list:
        """Map one dst rank to the list of src ranks it must read.

        Returns ``[None]`` when the dimension is absent on the src side
        (or when *rank* itself is None).  When the dst drops the dimension
        entirely, every src ep rank is gathered.
        """
        if src_rank_n is None: # for now only the src rank is considered
            return [None]
        if dst_rank_n is None:
            return src_ckpt.ep_ranks
        if rank is None:
            return [None]
        assert dst_rank_n <= src_rank_n # for now only shrinking tp/ep is handled
        if src_rank_n == dst_rank_n:
            return [rank]
        elif src_rank_n > dst_rank_n:
            assert src_rank_n % dst_rank_n == 0
            mult = src_rank_n // dst_rank_n
            return list(range(mult*rank, mult*rank+mult))
        else:
            # Unreachable after the assert above; kept for symmetry with a
            # possible future "growing" case.
            assert dst_rank_n % src_rank_n == 0
            mult = dst_rank_n // src_rank_n
            return [rank // mult]
        
    def get_layeridxs_by_pprank(self, pp_rank) -> list:
        """Global layer ids owned by *pp_rank* in this layout."""
        return self.pprank_layeridxs[pp_rank]
    
    def get_pp_ranks_by_layeridxs(self, layeridxs) -> list:
        """Owning pp rank for each layer id (``[None]`` when pp == 1)."""
        if self.pp == 1:
            return [None]
        return [self.layeridx_pprank[layer][0] for layer in layeridxs]
    
    def data_deleter(self, dst_tp_rank, dst_pp_rank, dst_ep_rank,
                     src_ckpt, src_data, src_tp_rank, src_pp_rank, src_ep_rank):
        """Prune from *src_data* every layer the dst pp stage does not need.

        Returns ``(keep_layeridxs, src_data)``.  The tp/ep rank arguments
        are currently unused.
        NOTE(review): the ``startswith(del_key)`` prefix match can
        over-match (local idx 1 also matches ``layers.10`` and up) once a
        stage holds more than 10 local layers — confirm before using deep
        stages with this path.
        """
        dst_layeridxs = self.get_layeridxs_by_pprank(dst_pp_rank)
        src_layeridxs = src_ckpt.get_layeridxs_by_pprank(src_pp_rank)
        keep_layeridxs = [x for x in src_layeridxs if x in dst_layeridxs]
        del_layeridxs = [x for x in src_layeridxs if x not in dst_layeridxs]
        for layer in del_layeridxs:
            if src_ckpt.vpp is None:
                layer_local_idx = src_ckpt.layeridx_pprank[layer][1]
                del_key = f'decoder.layers.{layer_local_idx}'
                all_keys = list(src_data['model'].keys())
                for key in all_keys:
                    if key.startswith(del_key):
                        del src_data['model'][key]
            else:
                # With vpp, weights live under per-stage sub-dicts 'modelN'.
                vpp_stage_rank, layer_local_idx = src_ckpt.layeridx_pprank[layer][1:]
                vpp_stage_key = f'model{vpp_stage_rank}'
                del_key = f'decoder.layers.{layer_local_idx}'
                all_keys = list(src_data[vpp_stage_key].keys())
                for key in all_keys:
                    if key.startswith(del_key):
                        del src_data[vpp_stage_key][key]
        return keep_layeridxs, src_data
    
    def get_dst_data_0(self, tp_rank, pp_rank, ep_rank, src_ckpt) -> dict:
        """Load src shards for one dst rank, pruning unneeded layers.

        Variant of ``get_dst_data`` that keeps the whole pruned state dict
        per source file, keyed by (kept layer ids, pp, tp, ep).  Not called
        by ``run`` — presumably an earlier/alternative pipeline step.
        Returns (src_data, layeridxs, src_tp_ranks, src_ep_ranks).
        """
        src_tp_ranks = self.get_rank_maps(tp_rank, self.tp, src_ckpt.tp, src_ckpt)
        src_ep_ranks = self.get_rank_maps(ep_rank, self.ep, src_ckpt.ep, src_ckpt)
        layeridxs = self.get_layeridxs_by_pprank(pp_rank)
        src_pp_ranks = src_ckpt.get_pp_ranks_by_layeridxs(layeridxs)
        src_pp_ranks = list(set(src_pp_ranks))
        src_data = {}
        nfiles = len(src_tp_ranks) * len(src_pp_ranks) * len(src_ep_ranks)
        print(f'get src weight for dst tp_rank: {tp_rank}, pp_rank: {pp_rank}, ep_rank: {ep_rank}')
        for tp_rank_, pp_rank_, ep_rank_ in tqdm(product(src_tp_ranks, src_pp_ranks, src_ep_ranks), total=nfiles):
            src_path = src_ckpt.get_pt_path_by_tpppep_rank(tp_rank_, pp_rank_, ep_rank_)
            src_data_ = torch.load(src_path, map_location='cpu')
            keep_layeridxs, src_data_ = self.data_deleter(
                tp_rank, pp_rank, ep_rank,
                src_ckpt, src_data_, tp_rank_, pp_rank_, ep_rank_)
            src_data[(tuple(keep_layeridxs), pp_rank_, tp_rank_, ep_rank_)] = src_data_
        return src_data, layeridxs, src_tp_ranks, src_ep_ranks
    
    def data_filter(self, dst_tp_rank, dst_pp_rank, dst_ep_rank,
                    src_ckpt, src_data, src_tp_rank, src_pp_rank, src_ep_rank):
        """Extract the weights one dst pp stage needs from one src file.

        Keys are renamed from the src-local layer index to the *global*
        layer index; embedding weights ride along from src pp stage 0 and
        final-layernorm/output-layer weights from the last src pp stage.
        Returns {(global_layer, src_tp_rank, src_ep_rank): weights}.
        NOTE(review): same ``startswith`` prefix over-match caveat as
        ``data_deleter`` for stages with more than 10 local layers.
        """
        dst_layeridxs = self.get_layeridxs_by_pprank(dst_pp_rank)
        src_layeridxs = src_ckpt.get_layeridxs_by_pprank(src_pp_rank)
        keep_layeridxs = [x for x in src_layeridxs if x in dst_layeridxs]
        dst_data = {}
        embd_keys = ['embedding.']
        post_keys = ['decoder.final_layernorm.', 'output_layer.']
        for layer in keep_layeridxs:
            dst_weights = {}
            if src_ckpt.vpp is None:
                layer_local_idx = src_ckpt.layeridx_pprank[layer][1]
                keep_key = f'decoder.layers.{layer_local_idx}'
                all_keys = list(src_data['model'].keys())
                for key in all_keys:
                    if key.startswith(keep_key):
                        dst_weights[key.replace(f'layers.{layer_local_idx}', f'layers.{layer}')] = src_data['model'].pop(key)
                    if src_pp_rank == 0:
                        for embd_key in embd_keys:
                            if key.startswith(embd_key):
                                dst_weights[key] = src_data['model'].pop(key)
                    if src_pp_rank == src_ckpt.pp-1:
                        for post_key in post_keys:
                            if key.startswith(post_key):
                                dst_weights[key] = src_data['model'].pop(key)
            else:
                # With vpp, weights live under per-stage sub-dicts 'modelN'.
                vpp_stage_rank, layer_local_idx = src_ckpt.layeridx_pprank[layer][1:]
                vpp_stage_key = f'model{vpp_stage_rank}'
                keep_key = f'decoder.layers.{layer_local_idx}'
                all_keys = list(src_data[vpp_stage_key].keys())
                for key in all_keys:
                    if key.startswith(keep_key):
                        dst_weights[key.replace(f'layers.{layer_local_idx}', f'layers.{layer}')] = src_data[vpp_stage_key].pop(key)
                    if src_pp_rank == 0:
                        for embd_key in embd_keys:
                            if key.startswith(embd_key):
                                dst_weights[key] = src_data[vpp_stage_key].pop(key)
                    if src_pp_rank == src_ckpt.pp-1:
                        for post_key in post_keys:
                            if key.startswith(post_key):
                                dst_weights[key] = src_data[vpp_stage_key].pop(key)
            dst_data[(layer, src_tp_rank, src_ep_rank)] = dst_weights
        return dst_data
    
    def get_dst_data(self, tp_rank, pp_rank, ep_rank, src_ckpt) -> tuple:
        """Gather all src weights one dst (tp, pp, ep) rank needs.

        Loads every relevant src rank file on CPU and filters it down with
        ``data_filter``.  Returns (src_data, layeridxs, src_tp_ranks,
        src_ep_ranks) for the subsequent merge step.
        """
        src_tp_ranks = self.get_rank_maps(tp_rank, self.tp, src_ckpt.tp, src_ckpt)
        src_ep_ranks = self.get_rank_maps(ep_rank, self.ep, src_ckpt.ep, src_ckpt)
        layeridxs = self.get_layeridxs_by_pprank(pp_rank)
        src_pp_ranks = src_ckpt.get_pp_ranks_by_layeridxs(layeridxs)
        src_pp_ranks = list(set(src_pp_ranks))
        src_data = {}
        nfiles = len(src_tp_ranks) * len(src_pp_ranks) * len(src_ep_ranks)
        print(f'get src weight for dst tp_rank: {tp_rank}, pp_rank: {pp_rank}, ep_rank: {ep_rank}')
        for tp_rank_, pp_rank_, ep_rank_ in tqdm(product(src_tp_ranks, src_pp_ranks, src_ep_ranks), total=nfiles):
            src_path = src_ckpt.get_pt_path_by_tpppep_rank(tp_rank_, pp_rank_, ep_rank_)
            src_data_ = torch.load(src_path, map_location='cpu')
            src_data_ = self.data_filter(
                tp_rank, pp_rank, ep_rank,
                src_ckpt, src_data_, tp_rank_, pp_rank_, ep_rank_)
            src_data.update(src_data_)
        return src_data, layeridxs, src_tp_ranks, src_ep_ranks
    
    def copy_iter_info_txt(self, src_ckpt):
        """Copy latest_checkpointed_iteration.txt from src to dst root."""
        shutil.copyfile(os.path.join(src_ckpt.ckpt_path, 'latest_checkpointed_iteration.txt'),
                        os.path.join(self.ckpt_path, 'latest_checkpointed_iteration.txt'))
    
    def merge_dst_data(self, src_data, layeridxs, src_tp_ranks, src_ep_ranks) -> dict:
        """Stub: per-layer merging plus preprocess/postprocess special
        handling would live here.  ``run`` uses the module-level
        ``merge_dst_data`` imported from ``merge`` instead of this method.
        """
        pass
    
    def rearrn_dst_data(self, dst_data):
        """Regroup merged per-layer weights into the on-disk layout.

        Renames global layer indices back to this layout's local indices
        and buckets everything under 'model' (no vpp) or 'modelN' (vpp
        stage N).  Assumes *dst_data* is keyed by global layer id — TODO
        confirm against merge.merge_dst_data's output.
        """
        res = {}
        for layer, dst_weights in dst_data.items():
            if self.vpp is None:
                vpp_stage_key = 'model'
                layer_local_idx = self.layeridx_pprank[layer][1]
            else:
                vpp_stage_rank, layer_local_idx = self.layeridx_pprank[layer][1:]
                vpp_stage_key = f'model{vpp_stage_rank}'
            if not vpp_stage_key in res:
                res[vpp_stage_key] = {}
            all_keys = list(dst_weights.keys())
            for key in all_keys:
                if key.startswith(f'decoder.layers.{layer}'):
                    dst_weights[key.replace(f'layers.{layer}', f'layers.{layer_local_idx}')] = dst_weights.pop(key)
            res[vpp_stage_key].update(dst_weights)
        return res
    
    def run(self, src_ckpt):
        """Convert *src_ckpt* to this layout, one dst rank file at a time.

        For every dst (tp, pp, ep) rank: gather the needed src shards,
        merge them (module-level merge_dst_data), rename layers back to
        local indices, tag iteration/checkpoint_version and save.
        """
        nfiles = len(self.tp_ranks) * len(self.pp_ranks) * len(self.ep_ranks)
        for tp_rank, pp_rank, ep_rank in tqdm(product(self.tp_ranks, self.pp_ranks, self.ep_ranks), total=nfiles):
            dst_data, layeridxs, src_tp_ranks, src_ep_ranks = self.get_dst_data(tp_rank, pp_rank, ep_rank, src_ckpt)
            dst_data = merge_dst_data(dst_data, layeridxs, src_tp_ranks, src_ep_ranks)
            dst_data = self.rearrn_dst_data(dst_data)
            dst_data['iteration'] = self.iteration
            dst_data['checkpoint_version'] = 3.0
            dst_path = self.get_pt_path_by_tpppep_rank(tp_rank, pp_rank, ep_rank)
            torch.save(dst_data, dst_path)
        self.copy_iter_info_txt(src_ckpt)
        
        
def add_src_ckpt_args(parser):
    """Register the source-checkpoint CLI options on *parser* in place."""
    group = parser.add_argument_group(title='src ckpt args')
    group.add_argument('--src-dir', type=str, required=True,
                       help='path of src checkpoint')
    group.add_argument('--iteration', type=int, default=1,
                       help='iteration of src checkpoint')
    group.add_argument('--layers', type=int, required=True,
                       help='number of layers')
    group.add_argument('--experts', type=int, required=True,
                       help='number of experts')
    group.add_argument('--src-tp', type=int, required=True,
                       help='tp of src checkpoint')
    group.add_argument('--src-pp', type=int, required=True,
                       help='pp of src checkpoint')
    group.add_argument('--src-ep', type=int, required=True,
                       help='ep of src checkpoint')
    # Fix: was `required=None`, a typo for `default=None` (argparse's
    # `required` expects a bool).  Behavior is unchanged: optional flag
    # that defaults to None.
    group.add_argument('--src-vpp', type=int, default=None,
                       help='vpp of src checkpoint')
    group.add_argument('--src-num-layer-list', type=str, default=None,
                       help='a list of number of layers, src checkpoint,'
                       'seperated by comma; e.g., 4,4,4,4')


def add_dst_ckpt_args(parser):
    """Register the destination-checkpoint CLI options on *parser* in place."""
    group = parser.add_argument_group(title='dst ckpt args')
    group.add_argument('--dst-dir', type=str, required=True,
                       help='path to save checkpoint')
    group.add_argument('--dst-tp', type=int, required=True,
                       help='tp of dst checkpoint')
    group.add_argument('--dst-pp', type=int, required=True,
                       help='pp of dst checkpoint')
    group.add_argument('--dst-ep', type=int, required=True,
                       help='ep of dst checkpoint')
    # Fix: was `required=None`, a typo for `default=None` (argparse's
    # `required` expects a bool).  Behavior is unchanged: optional flag
    # that defaults to None.
    group.add_argument('--dst-vpp', type=int, default=None,
                       help='vpp of dst checkpoint')
    group.add_argument('--dst-num-layer-list', type=str, default=None,
                       help='a list of number of layers, dst checkpoint,'
                       'seperated by comma; e.g., 4,4,4,4')


def get_args():
    """Build the CLI parser (src + dst argument groups) and parse argv."""
    parser = argparse.ArgumentParser()
    for register in (add_src_ckpt_args, add_dst_ckpt_args):
        register(parser)
    return parser.parse_args()


def _parse_num_layer_list(value):
    """Turn a '--*-num-layer-list' string like '4,4,4,4' into [4, 4, 4, 4].

    Returns None unchanged (meaning: use the even layers // pp split).
    """
    if value is None:
        return None
    return [int(item) for item in value.split(',')]


def main():
    """Entry point: describe src and dst layouts, then convert.

    Fix: the --src/--dst-num-layer-list options arrive as comma-separated
    strings, but PtCkpt.__init__ runs `sum(num_layer_list)` on them, which
    raises TypeError on a str.  Parse them into lists of ints first.
    """
    args = get_args()
    src_ckpt = PtCkpt(args.src_dir, args.iteration, args.layers, args.experts,
                      args.src_tp, args.src_pp, args.src_ep, args.src_vpp,
                      _parse_num_layer_list(args.src_num_layer_list))
    dst_ckpt = PtCkpt(args.dst_dir, args.iteration, args.layers, args.experts,
                      args.dst_tp, args.dst_pp, args.dst_ep, args.dst_vpp,
                      _parse_num_layer_list(args.dst_num_layer_list))
    dst_ckpt.run(src_ckpt)


# Run the conversion only when executed as a script, not on import.
if __name__ == '__main__':
    main()
    
    
    
    
    
    
    
    
    
    
    