import sys 
import os
import random
import pytest
import torch
import torch_npu
import numpy as np
import math

sys.path.append(os.path.join(os.path.dirname(__file__), "../src"))
from block_sparse_attn_triton import block_sparse_attention, get_sparse_attn_mask
from block_sparse_attn_torch import reference_block_sparse_attention
from gen_test_input import gen_input, TEST_CASES
sys.path.append(os.path.join(os.path.dirname(__file__), "../../../precision"))
from compare import check_operator_accuracy
sys.path.append(os.path.join(os.path.dirname(__file__), "../../../profiler"))
from ascend_profiler import ascend_profiler_wrapper

# Execution device: Ascend NPU (via the torch_npu adapter imported above).
DEVICE = "npu"
torch.set_default_device(DEVICE)
# NOTE(review): NPU card index 6 is hard-coded — confirm it matches the target host.
torch.npu.set_device(6)
# Fixed random seed so generated test inputs are reproducible across
# torch, numpy, and the stdlib random module.
seed = 42
torch.manual_seed(seed)
np.random.seed(seed) 
random.seed(seed)

def prof_block_sparse_attention(
    test_case,
):
    """Profile one block-sparse-attention test case with the Ascend profiler.

    Args:
        test_case: Tuple of (batch_size, num_heads, k_len, q_len, head_size,
            homo_head, block_size, local_blocks, vert_strides, dtype), i.e. the
            layout used by TEST_CASES / gen_input.

    Side effects:
        Writes profiler results under ./prof_result/<case_str>/ where
        <case_str> encodes the case dimensions joined by "x".
    """
    (batch_size, num_heads, k_len, q_len, head_size, homo_head, block_size,
     local_blocks, vert_strides, dtype) = test_case
    q, k, v, homo_head, block_size, local_blocks, vert_strides = gen_input(*test_case)
    # Standard softmax scaling: 1 / sqrt(head_dim); q is assumed to be
    # (batch, heads, seq, head_dim) — head_dim taken from axis 3.
    scale = 1.0 / math.sqrt(float(q.shape[3]))

    # Build the sparse mask once, outside the profiled callable, so only the
    # attention kernel itself is measured. Only the block pattern is needed;
    # the CSR and dense forms are discarded.
    _mask_csr, block_sparse_pattern, _mask_dense = get_sparse_attn_mask(
        q, k.shape[2], block_size=block_size,
        local_blocks=local_blocks,
        vert_strides=vert_strides,
        homo_head=homo_head,
        return_dense=True,
    )

    def call():
        # Output is intentionally discarded: this wrapper exists purely so the
        # profiler can time the kernel launch.
        block_sparse_attention(
            q, k, v, block_size, local_blocks,
            vert_strides, homo_head, scale, block_sparse_pattern,
        )

    # One profiler result sub-directory per test case.
    prof_dir = "./prof_result/"
    # Bug fix: the original used `*num_heads`, which raises TypeError because
    # num_heads is a scalar unpacked from test_case, not an iterable.
    case_str = "x".join(map(str, (batch_size, num_heads, k_len, q_len,
                                  head_size, block_size, local_blocks,
                                  vert_strides)))
    result_path = prof_dir + case_str + "/"
    os.makedirs(result_path, exist_ok=True)
    ascend_profiler_wrapper(call, result_path)

if __name__ == "__main__":

    print("=" * 50, "\n")

    # Iterate cases directly with enumerate instead of range(len(...)).
    # prof_block_sparse_attention returns None, so its result is not captured
    # (the original bound it to an unused `cmp_results`).
    for i, test_case in enumerate(TEST_CASES):
        print(f"case {i} " + "=" * 50)
        prof_block_sparse_attention(test_case)

    print("=" * 50, "\n")