import itertools
import collections
from typing import List, Dict
from abc import ABC, abstractmethod
from dataclasses import dataclass

from ..utils import logger
from ..utils.parallel_config import ParallelConfig


@dataclass
class Template(ABC):
    """Base class describing how a parallel configuration's profiling data can
    be derived from a small set of measured "basic" configurations.

    A template is identified by its mode string (e.g. 'TP_EP_MBS'), which
    names the parallel dimensions that vary across its basic configurations.
    """

    # Template name, e.g. 'TP', 'CUP_EP_MBS'.
    mode: str
    # True when the template models the attention part (EP is ignored there).
    is_attn: bool

    def resort(self, basic_configs: List[ParallelConfig]):
        """Move the base config to the front of `basic_configs` and derive the
        compute order.

        The base config is the first one that differs from every other config
        in at most one compared dimension (falls back to index 0 when none
        qualifies).

        Returns:
            (basic_configs, compute_order) where compute_order lists the
            dimension tags ('tp', 'cp', 'up', 'cup', 'ep', 'mbs') in which the
            non-base configs differ from the base config.
        """
        # get_basic_configs may hand back an itertools.combinations tuple.
        if isinstance(basic_configs, tuple):
            basic_configs = list(basic_configs)

        def compared_dims(cfg):
            # Dimensions used when locating the base config.
            if self.is_attn:
                return [cfg.tensor_model_parallel_size, cfg.ring_attention_size,
                        cfg.ulysses_size, cfg.micro_batch_size]
            return [cfg.tensor_model_parallel_size, cfg.ring_attention_size,
                    cfg.ulysses_size, cfg.expert_model_parallel_size,
                    cfg.micro_batch_size]

        # Find the base config: differs from all others in at most one dimension.
        index = 0
        for i, cfg1 in enumerate(basic_configs):
            arr1 = compared_dims(cfg1)
            if all(sum(e1 != e2 for e1, e2 in zip(arr1, compared_dims(cfg2))) <= 1
                   for cfg2 in basic_configs):
                index = i
                break

        base_config = basic_configs.pop(index)
        basic_configs.insert(0, base_config)

        # Record, per non-base config, which dimensions differ from the base.
        compute_order = []
        for config in basic_configs[1:]:
            if self.is_attn:
                if config.tensor_model_parallel_size != base_config.tensor_model_parallel_size:
                    compute_order.append('tp')

                if config.ring_attention_size != base_config.ring_attention_size:
                    compute_order.append('cp')

                if config.ulysses_size != base_config.ulysses_size:
                    compute_order.append('up')

                if config.micro_batch_size != base_config.micro_batch_size:
                    compute_order.append('mbs')

            else:
                if config.tensor_model_parallel_size != base_config.tensor_model_parallel_size:
                    compute_order.append('tp')

                # Ring-attention and ulysses sizes are folded into one combined
                # 'cup' dimension, recorded at most once.
                if config.ring_attention_size * config.ulysses_size != base_config.ring_attention_size * base_config.ulysses_size:
                    if 'cup' in compute_order:
                        # NOTE(review): this `continue` also skips the 'ep' and
                        # 'mbs' checks for this config — confirm that is intended.
                        continue
                    else:
                        compute_order.append('cup')

                if config.expert_model_parallel_size != base_config.expert_model_parallel_size:
                    compute_order.append('ep')

                if config.micro_batch_size != base_config.micro_batch_size:
                    compute_order.append('mbs')

        return basic_configs, compute_order

    @abstractmethod
    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        """Return the basic configs from `search_space` needed to derive
        `cur_config`; empty when there are not enough matching configs."""
        raise NotImplementedError('This method should be overridden by subclasses')

    @staticmethod
    def factory(mode, is_attn):
        """Instantiate the template class registered for `mode`.

        Raises:
            AssertionError: when no template matches `mode`.
        """
        # Built at call time because the subclasses are defined below.
        # Several modes are aliases for the same template class
        # (combined 'CUP' spellings vs separate 'CP'/'UP' spellings).
        template_classes = {
            'TP_EP': TpAndEpTemplate,
            'TP': TpTemplate,
            'EP': EpTemplate,
            'MBS': MbsTemplate,
            'TP_MBS': TpAndMbsTemplate,
            'EP_MBS': EpAndMbsTemplate,
            'TP_EP_MBS': TpAndEpAndMbsTemplate,
            'CUP': CUpTemplate,
            'TP_CUP': TpAndCpAndUpTemplate,
            'CUP_EP': CUpAndEpTemplate,
            'CUP_MBS': CpAndUpAndMbsTemplate,
            'TP_CUP_EP': TpAndCUpAndEpTemplate,
            'TP_CUP_MBS': TpAndCpAndUPAndMbsTemplate,
            'CUP_EP_MBS': CUpAndEpAndMbsTemplate,
            'TP_CUP_EP_MBS': TpAndCUpAndEpAndMbsTemplate,
            'CP': CpTemplate,
            'UP': UpTemplate,
            'TP_CP': TpAndCpTemplate,
            'TP_UP': TpAndUpTemplate,
            'CP_UP': CpAndUpTemplate,
            'CP_MBS': CpAndMbsTemplate,
            'UP_MBS': UpAndMbsTemplate,
            'TP_CP_UP': TpAndCpAndUpTemplate,
            'TP_CP_MBS': TpAndCpAndMbsTemplate,
            'TP_UP_MBS': TpAndUpAndMbsTemplate,
            'CP_UP_MBS': CpAndUpAndMbsTemplate,
            'TP_CP_UP_MBS': TpAndCpAndUPAndMbsTemplate,
        }
        template_cls = template_classes.get(mode)
        if template_cls is None:
            raise AssertionError(f"can't find matched template, mode is {mode}, is_attn = {is_attn}")
        return template_cls(mode, is_attn)

    @staticmethod
    def select_mode(config: ParallelConfig, is_attn):
        """Derive the template mode name for `config`.

        Every parallel dimension greater than 1 contributes a tag; a fully
        serial config falls back to 'MBS'.
        """
        res = []

        if is_attn:
            if config.tensor_model_parallel_size > 1:
                res.append('TP')
            if config.ring_attention_size > 1:
                res.append('CP')
            if config.ulysses_size > 1:
                res.append('UP')
            if config.micro_batch_size > 1:
                res.append('MBS')
            # Fully serial config: derive it from the MBS template.
            if config.tensor_model_parallel_size == 1 \
                and config.ring_attention_size == 1 \
                and config.ulysses_size == 1 \
                and config.micro_batch_size == 1:
                res.append('MBS')
        else:
            if config.tensor_model_parallel_size > 1:
                res.append('TP')
            # Ring-attention and ulysses are handled as one combined dimension.
            if config.ring_attention_size > 1 or config.ulysses_size > 1:
                res.append('CUP')
            if config.expert_model_parallel_size > 1:
                res.append('EP')
            if config.micro_batch_size > 1:
                res.append('MBS')
            # Fully serial config: derive it from the MBS template.
            if config.tensor_model_parallel_size == 1 \
                and config.ring_attention_size == 1 \
                and config.ulysses_size == 1 \
                and config.expert_model_parallel_size <= 1 \
                and config.micro_batch_size == 1:
                res.append('MBS')
        return '_'.join(res)

    @staticmethod
    def exist(search_spaces: List[ParallelConfig], config: ParallelConfig):
        """Return True when a config with the same TP/CP/UP/EP/MBS sizes as
        `config` is already present in `search_spaces`."""
        pattern = 'TP{}_CP{}_UP{}_EP{}_MBS{}'

        def signature(cfg):
            arr = cfg.to_list()
            # to_list layout: index 1 = TP, 3 = CP, 4 = UP, 7 = EP, 5 = MBS
            # (presumably — confirm against ParallelConfig.to_list).
            return pattern.format(arr[1], arr[3], arr[4], arr[7], arr[5])

        config_str = signature(config)
        return any(signature(cfg) == config_str for cfg in search_spaces)

    @staticmethod
    def set_all_template(search_space: List[ParallelConfig], is_attn: bool):
        """Build every template derivable from `search_space`.

        Returns:
            basic_configs:     template name -> basic parallel configs the
                               template is generated from
            compute_orders:    template name -> dimension compute order
            unmatched_configs: configs no template could be generated for
        """
        basic_configs: Dict[str, List[ParallelConfig]] = {}
        compute_orders: Dict[str, list] = {}

        # Configs that matched no template.
        unmatched_configs: List[ParallelConfig] = []

        # Visit configs with larger parallel sizes first so their templates
        # are built first.
        search_space = sorted(search_space, key=lambda x: (x.tensor_model_parallel_size,
                                                           x.ring_attention_size,
                                                           x.ulysses_size,
                                                           x.expert_model_parallel_size), reverse=True)

        for config in search_space:
            # The template mode this config needs.
            mode = Template.select_mode(config, is_attn)
            if not mode:
                continue

            # Config can already be derived from an existing template.
            if mode in basic_configs:
                logger.info(f"{config} match {mode}")
                continue

            # Build a new template for this mode.
            template = Template.factory(mode, is_attn)
            matched_configs = template.get_basic_configs(search_space, config)
            if len(matched_configs) == 0:
                logger.warning(f"no enough basic configs to generate template: {mode}")
                unmatched_configs.append(config)
                continue

            logger.info(f"{config} add new template({mode}) success")
            matched_configs, comp_order = template.resort(matched_configs)
            basic_configs[mode] = matched_configs
            compute_orders[mode] = comp_order

        return basic_configs, compute_orders, unmatched_configs
            

class TpAndEpTemplate(Template):
    """Template generated from three configs that vary only TP and EP."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates that use only TP and EP parallelism.
        candidates = []
        for cand in search_space:
            if cand.micro_batch_size == 1 \
                and cand.ring_attention_size * cand.ulysses_size == 1 \
                and cand.tensor_model_parallel_size > 1 \
                and cand.expert_model_parallel_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First triple containing cur_config where two configs share the TP
        # size and two share the EP size.
        for group in itertools.combinations(candidates, 3):
            if cur_config not in group:
                continue
            tp_cnt = collections.Counter(c.tensor_model_parallel_size for c in group)
            ep_cnt = collections.Counter(c.expert_model_parallel_size for c in group)
            if max(tp_cnt.values()) == 2 and max(ep_cnt.values()) == 2:
                return group
        return []


class TpTemplate(Template):
    """Template generated from two configs that differ only in TP size."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated pure-TP candidates; the non-attention case must also
        # exclude expert parallelism.
        candidates = []
        for cand in search_space:
            if self.is_attn:
                matched = (cand.micro_batch_size == 1
                           and cand.ring_attention_size == 1
                           and cand.ulysses_size == 1
                           and cand.tensor_model_parallel_size > 1)
            else:
                matched = (cand.micro_batch_size == 1
                           and cand.ring_attention_size * cand.ulysses_size == 1
                           and cand.expert_model_parallel_size <= 1
                           and cand.tensor_model_parallel_size > 1)
            if matched and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First pair containing cur_config with two different TP sizes.
        for pair in itertools.combinations(candidates, 2):
            if cur_config in pair and pair[0].tensor_model_parallel_size != pair[1].tensor_model_parallel_size:
                return pair
        return []


class EpTemplate(Template):
    """Template generated from two configs that differ only in EP size."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated pure-EP candidates.
        candidates = []
        for cand in search_space:
            if cand.micro_batch_size == 1 \
                and cand.ring_attention_size * cand.ulysses_size == 1 \
                and cand.tensor_model_parallel_size == 1 \
                and cand.expert_model_parallel_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First pair containing cur_config with two different EP sizes.
        for pair in itertools.combinations(candidates, 2):
            if cur_config in pair and pair[0].expert_model_parallel_size != pair[1].expert_model_parallel_size:
                return pair
        return []
    

class MbsTemplate(Template):
    """Template generated from two serial configs that differ only in MBS."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        if self.is_attn:
            def matches(cand):
                return (cand.tensor_model_parallel_size == 1
                        and cand.ring_attention_size == 1
                        and cand.ulysses_size == 1)
        else:
            def matches(cand):
                return (cand.tensor_model_parallel_size == 1
                        and cand.ring_attention_size * cand.ulysses_size == 1
                        and cand.expert_model_parallel_size <= 1)

        # Deduplicated candidates with no model parallelism at all.
        candidates = []
        for cand in search_space:
            if matches(cand) and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First pair containing cur_config with two different micro batch sizes.
        for pair in itertools.combinations(candidates, 2):
            if cur_config in pair and pair[0].micro_batch_size != pair[1].micro_batch_size:
                return pair
        return []


class TpAndMbsTemplate(Template):
    """Template generated from three configs that vary only TP and MBS."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        if self.is_attn:
            def matches(cand):
                return (cand.ring_attention_size == 1
                        and cand.ulysses_size == 1
                        and cand.tensor_model_parallel_size > 1)
        else:
            def matches(cand):
                return (cand.expert_model_parallel_size <= 1
                        and cand.ring_attention_size * cand.ulysses_size == 1
                        and cand.tensor_model_parallel_size > 1)

        # Deduplicated TP-only candidates (any micro batch size).
        candidates = []
        for cand in search_space:
            if matches(cand) and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First triple containing cur_config where two configs share the TP
        # size and two share the MBS.
        for group in itertools.combinations(candidates, 3):
            if cur_config not in group:
                continue
            tp_cnt = collections.Counter(c.tensor_model_parallel_size for c in group)
            mbs_cnt = collections.Counter(c.micro_batch_size for c in group)
            if max(tp_cnt.values()) == 2 and max(mbs_cnt.values()) == 2:
                return group
        return []


class EpAndMbsTemplate(Template):
    """Template generated from three configs that vary only EP and MBS."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated pure-EP candidates (any micro batch size).
        candidates = []
        for cand in search_space:
            if cand.tensor_model_parallel_size == 1 \
                and cand.ring_attention_size * cand.ulysses_size == 1 \
                and cand.expert_model_parallel_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First triple containing cur_config where two configs share the EP
        # size and two share the MBS.
        for group in itertools.combinations(candidates, 3):
            if cur_config not in group:
                continue
            ep_cnt = collections.Counter(c.expert_model_parallel_size for c in group)
            mbs_cnt = collections.Counter(c.micro_batch_size for c in group)
            if max(ep_cnt.values()) == 2 and max(mbs_cnt.values()) == 2:
                return group
        return []


class TpAndEpAndMbsTemplate(Template):
    """Template generated from four configs that vary TP, EP and MBS."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates using both TP and EP parallelism.
        candidates = []
        for cand in search_space:
            if cand.tensor_model_parallel_size > 1 \
                and cand.ring_attention_size * cand.ulysses_size == 1 \
                and cand.expert_model_parallel_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First quadruple containing cur_config where three configs share each
        # of the TP, EP and MBS values.
        for group in itertools.combinations(candidates, 4):
            if cur_config not in group:
                continue
            counters = (
                collections.Counter(c.tensor_model_parallel_size for c in group),
                collections.Counter(c.expert_model_parallel_size for c in group),
                collections.Counter(c.micro_batch_size for c in group),
            )
            if all(max(cnt.values()) == 3 for cnt in counters):
                return group
        return []
    

class CUpTemplate(Template):
    """Template generated from three configs that vary only CP and UP."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates using both ring-attention and ulysses.
        candidates = []
        for cand in search_space:
            if cand.tensor_model_parallel_size == 1 \
                and cand.expert_model_parallel_size <= 1 \
                and cand.micro_batch_size == 1 \
                and cand.ring_attention_size > 1 \
                and cand.ulysses_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First triple containing cur_config where two configs share the CP
        # size and two share the UP size.
        for group in itertools.combinations(candidates, 3):
            if cur_config not in group:
                continue
            cp_cnt = collections.Counter(c.ring_attention_size for c in group)
            up_cnt = collections.Counter(c.ulysses_size for c in group)
            if max(cp_cnt.values()) == 2 and max(up_cnt.values()) == 2:
                return group
        return []


class CUpAndEpTemplate(Template):
    """Template generated from four configs that vary CP, UP and EP."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates using ring-attention, ulysses and experts.
        candidates = []
        for cand in search_space:
            if cand.tensor_model_parallel_size == 1 \
                and cand.micro_batch_size == 1 \
                and cand.ring_attention_size > 1 \
                and cand.ulysses_size > 1 \
                and cand.expert_model_parallel_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First quadruple containing cur_config where three configs share each
        # of the CP, UP and EP sizes.
        for group in itertools.combinations(candidates, 4):
            if cur_config not in group:
                continue
            counters = (
                collections.Counter(c.ring_attention_size for c in group),
                collections.Counter(c.ulysses_size for c in group),
                collections.Counter(c.expert_model_parallel_size for c in group),
            )
            if all(max(cnt.values()) == 3 for cnt in counters):
                return group
        return []


class TpAndCUpAndEpTemplate(Template):
    """Template generated from five configs that vary TP, CP, UP and EP."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates using every parallel dimension except MBS.
        candidates = []
        for cand in search_space:
            if cand.micro_batch_size == 1 \
                and cand.tensor_model_parallel_size > 1 \
                and cand.ring_attention_size > 1 \
                and cand.ulysses_size > 1 \
                and cand.expert_model_parallel_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First quintuple containing cur_config where four configs share each
        # of the TP, CP, UP and EP sizes.
        for group in itertools.combinations(candidates, 5):
            if cur_config not in group:
                continue
            counters = (
                collections.Counter(c.tensor_model_parallel_size for c in group),
                collections.Counter(c.ring_attention_size for c in group),
                collections.Counter(c.ulysses_size for c in group),
                collections.Counter(c.expert_model_parallel_size for c in group),
            )
            if all(max(cnt.values()) == 4 for cnt in counters):
                return group
        return []


class CUpAndEpAndMbsTemplate(Template):
    """Template generated from five configs that vary CP, UP, EP and MBS."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates with no tensor parallelism but with
        # ring-attention, ulysses and expert parallelism.
        candidates = []
        for cand in search_space:
            if cand.tensor_model_parallel_size == 1 \
                and cand.expert_model_parallel_size > 1 \
                and cand.ring_attention_size > 1 \
                and cand.ulysses_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First quintuple containing cur_config where four configs share each
        # of the CP, UP, EP and MBS values.
        for group in itertools.combinations(candidates, 5):
            if cur_config not in group:
                continue
            counters = (
                collections.Counter(c.ring_attention_size for c in group),
                collections.Counter(c.ulysses_size for c in group),
                collections.Counter(c.expert_model_parallel_size for c in group),
                collections.Counter(c.micro_batch_size for c in group),
            )
            if all(max(cnt.values()) == 4 for cnt in counters):
                return group
        return []

class TpAndCUpAndEpAndMbsTemplate(Template):
    """Template generated from six configs that vary TP, CP, UP, EP and MBS."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates using every parallel dimension.
        candidates = []
        for cand in search_space:
            if cand.tensor_model_parallel_size > 1 \
                and cand.expert_model_parallel_size > 1 \
                and cand.ring_attention_size > 1 \
                and cand.ulysses_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First sextuple containing cur_config where five configs share each
        # of the TP, CP, UP, EP and MBS values.
        for group in itertools.combinations(candidates, 6):
            if cur_config not in group:
                continue
            counters = (
                collections.Counter(c.tensor_model_parallel_size for c in group),
                collections.Counter(c.ring_attention_size for c in group),
                collections.Counter(c.ulysses_size for c in group),
                collections.Counter(c.expert_model_parallel_size for c in group),
                collections.Counter(c.micro_batch_size for c in group),
            )
            if all(max(cnt.values()) == 5 for cnt in counters):
                return group
        return []


class CpTemplate(Template):
    """Template generated from two configs that differ only in CP size."""

    def get_basic_configs(self, search_space, cur_config):
        # Deduplicated candidates with no TP, no ulysses and serial MBS.
        candidates = []
        for cand in search_space:
            if cand.micro_batch_size == 1 \
                and cand.ulysses_size == 1 \
                and cand.tensor_model_parallel_size == 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First pair containing cur_config with two different CP sizes.
        for pair in itertools.combinations(candidates, 2):
            if cur_config in pair and pair[0].ring_attention_size != pair[1].ring_attention_size:
                return pair
        return []

class UpTemplate(Template):
    """Template generated from two configs that differ only in UP size."""

    def get_basic_configs(self, search_space, cur_config):
        # Deduplicated candidates with no TP, no ring-attention and serial MBS.
        candidates = []
        for cand in search_space:
            if cand.micro_batch_size == 1 \
                and cand.ring_attention_size == 1 \
                and cand.tensor_model_parallel_size == 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First pair containing cur_config with two different UP sizes.
        for pair in itertools.combinations(candidates, 2):
            if cur_config in pair and pair[0].ulysses_size != pair[1].ulysses_size:
                return pair
        return []

class TpAndCpTemplate(Template):
    """Template generated from three configs that vary only TP and CP."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates using TP and ring-attention only.
        candidates = []
        for cand in search_space:
            if cand.ulysses_size == 1 \
                and cand.micro_batch_size == 1 \
                and cand.ring_attention_size > 1 \
                and cand.tensor_model_parallel_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First triple containing cur_config where two configs share the TP
        # size and two share the CP size.
        for group in itertools.combinations(candidates, 3):
            if cur_config not in group:
                continue
            tp_cnt = collections.Counter(c.tensor_model_parallel_size for c in group)
            cp_cnt = collections.Counter(c.ring_attention_size for c in group)
            if max(tp_cnt.values()) == 2 and max(cp_cnt.values()) == 2:
                return group
        return []

class TpAndUpTemplate(Template):
    """Template generated from three configs that vary only TP and UP."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates using TP and ulysses only.
        candidates = []
        for cand in search_space:
            if cand.ring_attention_size == 1 \
                and cand.micro_batch_size == 1 \
                and cand.tensor_model_parallel_size > 1 \
                and cand.ulysses_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First triple containing cur_config where two configs share the TP
        # size and two share the UP size.
        for group in itertools.combinations(candidates, 3):
            if cur_config not in group:
                continue
            tp_cnt = collections.Counter(c.tensor_model_parallel_size for c in group)
            up_cnt = collections.Counter(c.ulysses_size for c in group)
            if max(tp_cnt.values()) == 2 and max(up_cnt.values()) == 2:
                return group
        return []

class CpAndUpTemplate(Template):
    """Template generated from three configs that vary only CP and UP."""

    def get_basic_configs(self, search_space, cur_config):
        # Deduplicated candidates using ring-attention and ulysses only.
        candidates = []
        for cand in search_space:
            if cand.tensor_model_parallel_size == 1 \
                and cand.micro_batch_size == 1 \
                and cand.ring_attention_size > 1 \
                and cand.ulysses_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First triple containing cur_config where two configs share the CP
        # size and two share the UP size.
        for group in itertools.combinations(candidates, 3):
            if cur_config not in group:
                continue
            cp_cnt = collections.Counter(c.ring_attention_size for c in group)
            up_cnt = collections.Counter(c.ulysses_size for c in group)
            if max(cp_cnt.values()) == 2 and max(up_cnt.values()) == 2:
                return group
        return []

class CpAndMbsTemplate(Template):
    """Template generated from three configs that vary only CP and MBS."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        """Return the first triple of deduplicated CP-only candidates that
        contains `cur_config`, where two configs share the CP size and two
        share the MBS; empty list when no such triple exists."""
        basic_configs = []

        # Deduplicated candidates using ring-attention only (any MBS).
        temp_search_spaces = []
        for config in search_space:
            if config.tensor_model_parallel_size == 1 \
                and config.ulysses_size == 1 \
                and config.ring_attention_size > 1 \
                and not Template.exist(temp_search_spaces, config):
                temp_search_spaces.append(config)

        for it in itertools.combinations(temp_search_spaces, 3):
            if not cur_config in it:
                continue

            # BUGFIX: was `x.ring_attetion_size` (typo) — raised AttributeError
            # whenever a candidate triple contained cur_config.
            all_cp = [x.ring_attention_size for x in it]
            all_mbs = [x.micro_batch_size for x in it]

            same_cp_cnt = collections.Counter(all_cp)
            same_mbs_cnt = collections.Counter(all_mbs)
            if max(same_cp_cnt.values()) == 2 and max(same_mbs_cnt.values()) == 2:
                basic_configs = it
                break

        return basic_configs

class UpAndMbsTemplate(Template):
    """Template generated from three configs that vary only UP and MBS."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        # Deduplicated candidates using ulysses only (any micro batch size).
        candidates = []
        for cand in search_space:
            if cand.tensor_model_parallel_size == 1 \
                and cand.ring_attention_size == 1 \
                and cand.ulysses_size > 1 \
                and not Template.exist(candidates, cand):
                candidates.append(cand)

        # First triple containing cur_config where two configs share the UP
        # size and two share the MBS.
        for group in itertools.combinations(candidates, 3):
            if cur_config not in group:
                continue
            up_cnt = collections.Counter(c.ulysses_size for c in group)
            mbs_cnt = collections.Counter(c.micro_batch_size for c in group)
            if max(up_cnt.values()) == 2 and max(mbs_cnt.values()) == 2:
                return group
        return []

class TpAndCpAndUpTemplate(Template):
    """Template whose basis varies the tp, cp and up axes (mbs fixed at 1)."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        """Pick four configs (including ``cur_config``) that vary tp, cp, up.

        Candidates are restricted to mbs == 1, ep <= 1, and tp/cp/up all
        enabled (> 1). The chosen quadruple must have, for each of tp, cp
        and up, a value shared by exactly three of its four members.

        Returns the matching 4-tuple, or an empty list when none exists.
        """
        basic_configs = []

        # Deduplicated candidate pool.
        temp_search_spaces = []
        for config in search_space:
            if config.micro_batch_size == 1 \
                and config.expert_model_parallel_size <= 1 \
                and config.tensor_model_parallel_size > 1 \
                and config.ulysses_size > 1 \
                and config.ring_attention_size > 1 \
                and not Template.exist(temp_search_spaces, config):
                temp_search_spaces.append(config)

        for it in itertools.combinations(temp_search_spaces, 4):
            if cur_config not in it:
                continue

            all_tp = [x.tensor_model_parallel_size for x in it]
            # Fix: the attribute was misspelled 'ring_attetion_size', which
            # raised AttributeError on every candidate quadruple.
            all_cp = [x.ring_attention_size for x in it]
            all_up = [x.ulysses_size for x in it]

            same_tp_cnt = collections.Counter(all_tp)
            same_cp_cnt = collections.Counter(all_cp)
            same_up_cnt = collections.Counter(all_up)
            if max(same_tp_cnt.values()) == 3 and max(same_cp_cnt.values()) == 3 and max(same_up_cnt.values()) == 3:
                basic_configs = it
                break

        return basic_configs

class TpAndCpAndMbsTemplate(Template):
    """Template whose basis varies the tp, cp and micro-batch axes."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        """Select four configs (``cur_config`` among them) with up disabled
        and both tp and cp enabled, such that each of the tp, cp and mbs
        axes has a value shared by exactly three of the four.
        Returns the 4-tuple, or an empty list when no such set exists.
        """
        # Deduplicated pool: up == 1 while tp and cp are both active.
        candidates = []
        for cfg in search_space:
            eligible = (cfg.ulysses_size == 1
                        and cfg.tensor_model_parallel_size > 1
                        and cfg.ring_attention_size > 1)
            if eligible and not Template.exist(candidates, cfg):
                candidates.append(cfg)

        for combo in itertools.combinations(candidates, 4):
            if cur_config not in combo:
                continue
            tp_counts = collections.Counter(c.tensor_model_parallel_size for c in combo)
            cp_counts = collections.Counter(c.ring_attention_size for c in combo)
            mbs_counts = collections.Counter(c.micro_batch_size for c in combo)
            if (max(tp_counts.values()) == 3
                    and max(cp_counts.values()) == 3
                    and max(mbs_counts.values()) == 3):
                return combo

        return []

class TpAndUpAndMbsTemplate(Template):
    """Template whose basis varies the tp, up and micro-batch axes."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        """Select four configs (``cur_config`` among them) with cp disabled
        and both tp and up enabled, such that each of the tp, up and mbs
        axes has a value shared by exactly three of the four.
        Returns the 4-tuple, or an empty list when no such set exists.
        """
        # Deduplicated pool: cp == 1 while tp and up are both active.
        candidates = []
        for cfg in search_space:
            eligible = (cfg.ring_attention_size == 1
                        and cfg.tensor_model_parallel_size > 1
                        and cfg.ulysses_size > 1)
            if eligible and not Template.exist(candidates, cfg):
                candidates.append(cfg)

        for combo in itertools.combinations(candidates, 4):
            if cur_config not in combo:
                continue
            tp_counts = collections.Counter(c.tensor_model_parallel_size for c in combo)
            up_counts = collections.Counter(c.ulysses_size for c in combo)
            mbs_counts = collections.Counter(c.micro_batch_size for c in combo)
            if (max(tp_counts.values()) == 3
                    and max(up_counts.values()) == 3
                    and max(mbs_counts.values()) == 3):
                return combo

        return []

class CpAndUpAndMbsTemplate(Template):
    """Template whose basis varies the cp, up and micro-batch axes."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        """Select four configs (``cur_config`` among them) with tp disabled,
        ep <= 1, and both cp and up enabled, such that each of the cp, up
        and mbs axes has a value shared by exactly three of the four.
        Returns the 4-tuple, or an empty list when no such set exists.
        """
        # Deduplicated pool: tp off, no expert parallelism, cp and up active.
        candidates = []
        for cfg in search_space:
            eligible = (cfg.tensor_model_parallel_size == 1
                        and cfg.expert_model_parallel_size <= 1
                        and cfg.ring_attention_size > 1
                        and cfg.ulysses_size > 1)
            if eligible and not Template.exist(candidates, cfg):
                candidates.append(cfg)

        for combo in itertools.combinations(candidates, 4):
            if cur_config not in combo:
                continue
            cp_counts = collections.Counter(c.ring_attention_size for c in combo)
            up_counts = collections.Counter(c.ulysses_size for c in combo)
            mbs_counts = collections.Counter(c.micro_batch_size for c in combo)
            if (max(cp_counts.values()) == 3
                    and max(up_counts.values()) == 3
                    and max(mbs_counts.values()) == 3):
                return combo

        return []

class TpAndCpAndUPAndMbsTemplate(Template):
    """Template whose basis varies the tp, cp, up and micro-batch axes."""

    def get_basic_configs(self, search_space: List[ParallelConfig], cur_config: ParallelConfig):
        """Select five configs (``cur_config`` among them) with ep <= 1 and
        tp, cp, up all enabled, such that each of the tp, cp, up and mbs
        axes has a value shared by exactly four of the five.
        Returns the 5-tuple, or an empty list when no such set exists.
        """
        # Deduplicated pool: no expert parallelism; tp, cp and up active.
        candidates = []
        for cfg in search_space:
            eligible = (cfg.expert_model_parallel_size <= 1
                        and cfg.tensor_model_parallel_size > 1
                        and cfg.ring_attention_size > 1
                        and cfg.ulysses_size > 1)
            if eligible and not Template.exist(candidates, cfg):
                candidates.append(cfg)

        for combo in itertools.combinations(candidates, 5):
            if cur_config not in combo:
                continue
            tp_counts = collections.Counter(c.tensor_model_parallel_size for c in combo)
            cp_counts = collections.Counter(c.ring_attention_size for c in combo)
            up_counts = collections.Counter(c.ulysses_size for c in combo)
            mbs_counts = collections.Counter(c.micro_batch_size for c in combo)
            if (max(tp_counts.values()) == 4
                    and max(cp_counts.values()) == 4
                    and max(up_counts.values()) == 4
                    and max(mbs_counts.values()) == 4):
                return combo

        return []


                


