#!/usr/bin/env python3
# coding: utf-8
# Copyright (c) 2025 Huawei Technologies Co., Ltd.
# This file is a part of the CANN Open Software.
# Licensed under CANN Open Software License Agreement Version 1.0 (the "License").
# Please refer to the License for details. You may not use this file except in compliance with the License.
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.
# See LICENSE in the root of the software repository for the full text of the License.
# ======================================================================================================================
import random
from typing import Union, List

from atk.case_generator.generator.generate_types import GENERATOR_REGISTRY
from atk.case_generator.generator.base_generator import CaseGenerator
from atk.configs.case_config import InputCaseConfig, CaseConfig


@GENERATOR_REGISTRY.register("ascend_generate_grouped_matmul")
class GroupedMatmulAddGenerator(CaseGenerator):
    """Case generator that randomizes the m/k/n/e dimensions of a
    grouped-matmul case and propagates them into every input shape."""

    def __init__(self, config):
        super().__init__(config)
        # Problem dimensions; populated by after_case_config.
        self.m = 0
        self.k = 0
        self.n = 0
        self.e = 0

    def after_case_config(self, case_config: CaseConfig) -> CaseConfig:
        """Draw random dimensions and rewrite the case's input shapes.

        Mutates the shape lists held by ``case_config.inputs`` in place
        and returns the same config object.
        """
        self.m = random.randint(1, 2048)
        self.k = random.randint(1, 7168)
        self.n = random.randint(1, 7168)

        activation_shape = case_config.inputs[0][0].shape
        weight_shape = case_config.inputs[1][0].shape
        bias_dims = case_config.inputs[2][0].shape
        scale_tensor = case_config.inputs[3][0]
        scale_dims = scale_tensor.shape
        per_token_scale_dims = case_config.inputs[4][0].shape
        group_list_dims = case_config.inputs[5].shape
        weight_format = case_config.inputs[11].range_values

        # FRACTAL_NZ weights: bump n/k to a multiple of 32/16 (always adds
        # at least one full block, even when already aligned).
        if weight_format == 'FRACTAL_NZ':
            self.n = self.n // 32 * 32 + 32
            self.k = self.k // 16 * 16 + 16

        # Activation: (m, k). e is drawn once m is fixed, capped at 16.
        activation_shape[0] = self.m
        self.e = random.randint(1, min(activation_shape[0], 16))
        activation_shape[1] = self.k

        # Weight: (e, k, n)
        weight_shape[0], weight_shape[1], weight_shape[2] = self.e, self.k, self.n

        # Bias and scale share the (e, n) layout.
        bias_dims[0], bias_dims[1] = self.e, self.n
        scale_dims[0], scale_dims[1] = self.e, self.n

        # Per-token scale: (m,)
        per_token_scale_dims[0] = self.m

        # Group list: length e, values drawn from [1, m].
        group_list_dims[0] = self.e
        case_config.inputs[5].range_values = [1, self.m]

        # Tuning config: upper bound is m // e, capped at 512.
        case_config.inputs[10][0].range_values = [1, min(self.m // self.e, 512)]

        # Input 12's dtype mirrors the scale dtype: bf16 stays bf16,
        # anything else falls back to fp16.
        target_dtype = "bf16" if scale_tensor.dtype == "bf16" else "fp16"
        case_config.inputs[12].range_values = target_dtype

        return case_config