import sys
from abc import ABC, abstractmethod
from typing import Dict, List, Tuple
from geesibling.adapters.pytorch.auto_parallel_simulator.mcmc_stratege import global_sizes, mcmc_search

class AbstractStrategy(ABC):
    """Interface for automatic-parallelism strategy searchers."""

    @abstractmethod
    def search_strategy(self, model_name: str, world_size: int, layer_num: int, layer_info: Dict[str, Dict[str, object]], num_exports: int = 0) -> Tuple[List[int], List[int], List[int]]:
        """Search for a parallel execution plan.

        Returns three lists, in order:
          1. best_stratge            -- the chosen parallel-size configuration
          2. best_layers_pp_stratege -- the pipeline-parallel layer partition
          3. best_layers_tp_stratege -- the tensor-parallel sizes per layer
        """
        ...

class MCMCStrategy(AbstractStrategy):
    """Searches parallel configurations with MCMC sampling.

    For every candidate size tuple produced by ``global_sizes`` (indices used
    below: size[0] = pipeline-parallel size, size[2] = data-parallel size,
    size[3] = expert-parallel size), runs ``mcmc_search`` and keeps the
    candidate with the lowest time cost whose balance does not regress.
    """

    def search_strategy(self, model_name, world_size, layer_num, layer_info, num_exports=0):
        """Return (best_stratge, best_layers_pp_stratege, best_layers_tp_stratege).

        Fix vs. original: all three results are initialized up front, so the
        method returns empty lists instead of raising UnboundLocalError when
        no candidate configuration passes the feasibility filter.
        """
        print(f'world_size is {world_size}, layer_num = {layer_num}, model_name = {model_name}, num_exports= {num_exports}')
        best_time_cost = float('inf')  # upper bound on acceptable cost
        best_balance = float('inf')
        best_stratge = []
        best_layers_pp_stratege = []
        best_layers_tp_stratege = []
        for size in global_sizes(model_name, world_size, num_exports):
            # Skip infeasible candidates: the tensor-parallel GPU count
            # (world_size // dp_size) must stay below the layer count.
            if world_size // size[2] >= layer_num:
                continue
            pp_size = size[0]
            num_iterations = 1000
            current_best_strategy, current_best_tp_size, current_best_time_cost, current_best_balance = mcmc_search(
                model_name=model_name,
                pp_size=pp_size,
                layer_nums=layer_num,
                num_iterations=num_iterations,
                layer_info=layer_info,
                tp_gpus=world_size // size[2],
                dp_size=size[2],
                # Expert parallelism only applies to MoE models (mixtral).
                ep_size=size[3] if model_name in ['mixtral'] else 1,
                experts_num=num_exports,
            )
            # Equivalent simplification of the original compound condition:
            # (ct < bt and cb < bb) or (cb == bb and ct < bt)
            #   == ct < bt and cb <= bb
            if current_best_time_cost < best_time_cost and current_best_balance <= best_balance:
                best_stratge = size.copy()
                best_layers_pp_stratege = current_best_strategy
                best_layers_tp_stratege = current_best_tp_size
                best_time_cost = current_best_time_cost
                best_balance = current_best_balance

        return best_stratge, best_layers_pp_stratege, best_layers_tp_stratege

class AutoParallelContext:
    """Strategy-pattern context that delegates auto-parallel plan search."""

    def __init__(self, strategy: AbstractStrategy):
        self.strategy = strategy

    def set_strategy(self, strategy: AbstractStrategy):
        """Swap in a different search strategy at runtime."""
        self.strategy = strategy

    def execute_auto_parallel(self, model_name: str, world_size: int, layer_num: int, layer_info: Dict[str, Dict[str, object]], num_exports: int = 0):
        """Run the configured strategy's search and return its result."""
        result = self.strategy.search_strategy(model_name, world_size, layer_num, layer_info, num_exports)
        return result