# Copyright (c) Huawei Technologies Co., Ltd. 2023. All rights reserved.
import torch
import numpy as np
from atk.configs.dataset_config import InputDataset
from atk.configs.results_config import TaskResult
from atk.tasks.api_execute import register
from atk.tasks.api_execute.base_api import BaseApi
from atk.tasks.dataset.base_dataset import OpsDataset


@register("ascend_function_moe_gating_topK_softmax_v2")
class MethodTorchGroupNormApi(BaseApi):
    """CPU golden reference for the Ascend ``moe_gating_topK_softmax_v2`` op.

    NOTE(review): the class name says "GroupNorm" but the implementation is
    MoE gating top-k softmax — looks like a copy-paste leftover. The name is
    kept unchanged because the registry/callers may reference it.
    """

    def __init__(self, task_result: TaskResult):
        super(MethodTorchGroupNormApi, self).__init__(task_result)

    def __call__(self, input_data: InputDataset, with_output: bool = False):
        """Compute top-k gating outputs for MoE routing.

        Reads from ``input_data.kwargs``:
            x: gating logits tensor, 2-D [rows, experts] or 3-D [b, s, experts]
            finishedOptional: optional per-row finished mask (or None)
            k: number of experts to select per row
            renorm: 1 -> top-k on raw logits then softmax over the k values;
                    0 -> softmax over all experts then take the k largest
            outputSoftmaxResultFlag: flag read from kwargs (the full softmax
                is currently returned unconditionally — see note at return)

        Returns:
            (out, indices, softmax) as torch tensors on ``x``'s device:
            out [rows, k] in x's dtype, indices [rows, k] int32, and the
            full softmax [rows, experts] in float32.
        """
        def softmax_func(x, axis=None):
            # Numerically stable softmax: subtract the per-axis max first.
            x = x.astype(np.float32)
            x_max = x.max(axis=axis, keepdims=True)
            y = np.exp(x - x_max)
            x_sum = y.sum(axis=axis, keepdims=True)
            return y / x_sum, x_max, x_sum

        gating = input_data.kwargs["x"]
        finished_optional = input_data.kwargs["finishedOptional"]
        k = input_data.kwargs["k"]
        renorm = input_data.kwargs["renorm"]
        softmax_flag = input_data.kwargs["outputSoftmaxResultFlag"]

        # Flatten 3-D [b, s, e] to 2-D [b*s, e] so the `[:, :k]` slicing
        # below selects along the expert axis.
        if len(gating.shape) == 3:
            gating = gating.reshape(-1, gating.shape[-1])
            if finished_optional is not None:
                finished_optional = finished_optional.flatten()

        # BUG FIX: this conversion previously happened BEFORE the reshape,
        # so 3-D inputs kept a 3-D numpy array and `indices[:, :k]` sliced
        # the sequence axis instead of taking top-k experts.
        gating_np = gating.to(torch.float32).cpu().numpy()

        num_expert = gating.shape[-1]
        softmax, _, _ = softmax_func(gating_np, -1)

        if renorm == 1:
            # renorm mode: rank raw logits, then softmax over the k winners.
            indices = np.argsort(-gating_np, axis=-1, kind='stable')[:, :k]
            values = np.take_along_axis(gating_np, indices, axis=-1)
            out, _, _ = softmax_func(values, -1)
        else:
            # standard mode: softmax first, then take the k largest probs.
            indices = np.argsort(-softmax, axis=-1, kind='stable')[:, :k]
            out = np.take_along_axis(softmax, indices, axis=-1)

        if finished_optional is not None:
            # Rows flagged as finished get the sentinel expert id num_expert.
            finished_np = finished_optional.cpu().numpy().reshape(-1, 1)
            indices = np.where(np.tile(finished_np, (1, k)), num_expert, indices)

        # Both branches of the original `renorm == 0 and softmax_flag` test
        # returned the identical triple, so the condition was dead code;
        # collapsed to one return. If softmax_flag is ever meant to suppress
        # the third output, that change must be coordinated with callers.
        return (torch.from_numpy(out).to(gating.device, dtype=gating.dtype),
                torch.from_numpy(indices).to(gating.device, dtype=torch.int32),
                torch.from_numpy(softmax).to(gating.device, dtype=torch.float))