#!/usr/bin/python3
# coding=utf-8
#
# Copyright (C) 2023-2025. Huawei Technologies Co., Ltd. All rights reserved.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# ===============================================================================

import os
import numpy as np
import torch
from torch.nn import functional as F

# Maps each supported torch dtype to the integer code the kernel-side
# tiling data uses to identify it.
dtype_emu = dict(
    zip(
        (torch.bfloat16, torch.float16, torch.float32, torch.int8, torch.int16, torch.int32),
        range(6),
    )
)

def gen_golden_data_simple(input_shape=(17, 1023), dtype=torch.float16, alpha=0.1):
    """Generate golden (reference) data for a LeakyReLU backward kernel test.

    Creates a random forward input in [-10, 10) and an upstream gradient in
    [-2, 2), computes the reference gradient dx with torch.autograd, and
    writes the tensors plus tiling metadata as raw binary files under
    ./input and ./output.

    Args:
        input_shape: shape of the generated tensors. The default (17, 1023)
            exercises the "uneven split across cores, unaligned per-core
            workload" case.
        dtype: torch dtype of the generated data; must be a key of dtype_emu.
        alpha: negative_slope passed to leaky_relu.
    """
    # Other shapes of interest (core-split / alignment cases from the
    # original comments):
    #   (1, 256)    even split across cores, aligned per-core workload
    #   (8, 1023)   even split across cores, unaligned per-core workload
    #   (32, 1023)  uneven split across cores, aligned per-core workload
    #   (17, 1023)  uneven split across cores, unaligned per-core workload

    # Forward input; requires grad so autograd can produce the reference dx.
    # BUG FIX: the original hard-coded torch.rand(17, 1023) here, silently
    # ignoring input_shape — the generated data and the tiling metadata could
    # disagree. The configured shape is now actually used.
    input_x = (torch.rand(*input_shape) * (10 - (-10)) + (-10)).to(dtype=dtype).requires_grad_(True)
    # Upstream gradient fed into the backward pass.
    input_dy = (torch.rand(*input_shape) * (2 - (-2)) + (-2)).to(dtype=dtype)

    # Forward pass: LeakyReLU.
    out = F.leaky_relu(input_x, negative_slope=alpha)

    # Reference gradient dx via autograd (out is dominated by input_x only).
    golden_dx, = torch.autograd.grad(
        outputs=out,
        inputs=input_x,
        grad_outputs=input_dy,
        retain_graph=False
    )

    # Tiling metadata: total element count and the dtype code for the kernel.
    # np.prod generalizes the original input_shape[0] * input_shape[1] to any rank.
    total_elems = int(np.prod(input_shape))
    tiling = np.array([total_elems, dtype_emu[dtype]], dtype=np.uint32)

    # Write everything out as raw binary blobs.
    os.makedirs("./input", exist_ok=True)
    os.makedirs("./output", exist_ok=True)

    tiling.tofile("./input/input_tiling.bin")
    input_x.detach().numpy().tofile("./input/input_x.bin")
    input_dy.numpy().tofile("./input/input_dy.bin")
    golden_dx.numpy().tofile("./output/golden.bin")

    print("Data generation completed:")
    print(f"  - Input x shape: {input_x.shape}, range: [{input_x.min():.3f}, {input_x.max():.3f}]")
    print(f"  - Input dy shape: {input_dy.shape}, range: [{input_dy.min():.3f}, {input_dy.max():.3f}]")
    print(f"  - Output dx shape: {golden_dx.shape}, range: [{golden_dx.min():.3f}, {golden_dx.max():.3f}]")
    print(f"  - Alpha value: {alpha}")

# Script entry point: generate the golden data set when run directly.
if __name__ == "__main__":
    gen_golden_data_simple()