# 
# Copyright (c) 2025 Huawei Technologies Co., Ltd.
# This program is free software, you can redistribute it and/or modify it under the terms and conditions of
# CANN Open Software License Agreement Version 2.0 (the "License").
# Please refer to the License for details. You may not use this file except in compliance with the License.
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.
# See LICENSE in the root of the software repository for the full text of the License.
# 
import os
import unittest
import numpy as np
import torch
import sys
import logging
import torch_npu
import op_test
from tensor_file import read_tensor
# np.set_printoptions(threshold=np.inf)

# Operation under test and its parameter set (random seed fixed to 0).
OP_NAME = "ToppsampleOperation"
OP_PARAM0 = {'randSeed':[0]}

# Reference uniform(0, 1) random sequence consumed by golden_calc, indexed
# modulo 512 per batch row. NOTE(review): presumably this reproduces the
# device-side RNG stream for randSeed=0 (values match the C rand() sequence
# seeded with 0) — confirm against the kernel implementation.
rand = [0.840188, 0.394383, 0.783099, 0.79844, 0.911647, 0.197551, 0.335223, 0.76823, 0.277775, 0.55397, 0.477397, 0.628871, 0.364784, 0.513401, 0.95223, 0.916195, 0.635712, 0.717297, 0.141603, 0.606969, 0.0163006, 0.242887, 0.137232, 0.804177, 0.156679, 0.400944, 0.12979, 0.108809, 0.998924, 0.218257, 0.512932, 0.839112, 0.61264, 0.296032, 0.637552, 0.524287, 0.493583, 0.972775, 0.292517, 0.771358, 0.526745, 0.769914, 0.400229, 0.891529, 0.283315, 0.352458, 0.807725, 0.919026, 0.0697553, 0.949327, 0.525995, 0.0860558, 0.192214, 0.663227, 0.890233, 0.348893, 0.0641713, 0.020023, 0.457702, 0.0630958, 0.23828, 0.970634, 0.902208, 0.85092, 0.266666, 0.53976, 0.375207, 0.760249, 0.512535, 0.667724, 0.531606, 0.0392803, 0.437638, 0.931835, 0.93081, 0.720952, 0.284293, 0.738534, 0.639979, 0.354049, 0.687861, 0.165974, 0.440105, 0.880075, 0.829201, 0.330337, 0.228968, 0.893372, 0.35036, 0.68667, 0.956468, 0.58864, 0.657304, 0.858676, 0.43956, 0.92397, 0.398437, 0.814767, 0.684219, 0.910972, 0.482491, 0.215825, 0.950252, 0.920128, 0.14766, 0.881062, 0.641081, 0.431953, 0.619596, 0.281059, 0.786002, 0.307458, 0.447034, 0.226107, 0.187533, 0.276235, 0.556444, 0.416501, 0.169607, 0.906804, 0.103171, 0.126075, 0.495444, 0.760475, 0.984752, 0.935004, 0.684445, 0.383188, 0.749771, 0.368664, 0.29416, 0.232262, 0.584489, 0.244413, 0.15239, 0.732149, 0.125475, 0.79347, 0.164102, 0.745071, 0.0745298, 0.950104, 0.0525293, 0.521563, 0.176211, 0.240062, 0.797798, 0.732654, 0.656564, 0.967405, 0.639458, 0.759735, 0.0934805, 0.134902, 0.52021, 0.0782321, 0.0699064, 0.204655, 0.46142, 0.819677, 0.573319, 0.755581, 0.0519388, 0.157807, 0.999994, 0.204329, 0.889956, 0.125468, 0.997799, 0.0540576, 0.87054, 0.0723288, 0.00416161, 0.923069, 0.593892, 0.180372, 0.163132, 0.39169, 0.913027, 0.819695, 0.359095, 0.552485, 0.57943, 0.452576, 0.687387, 0.0996401, 0.530808, 0.757294, 0.304295, 0.992228, 0.576971, 0.877614, 0.747809, 0.62891, 0.0354209, 0.747803, 0.833239, 0.925377, 0.873271, 
0.831038, 0.979434, 0.743811, 0.903366, 0.983596, 0.66688, 0.497259, 0.163968, 0.830012, 0.888949, 0.0769947, 0.649707, 0.248044, 0.62948, 0.229137, 0.70062, 0.316867, 0.328777, 0.231428, 0.074161, 0.633072, 0.223656, 0.651132, 0.510686, 0.971466, 0.280042, 0.546107, 0.719269, 0.113281, 0.471483, 0.59254, 0.944318, 0.450918, 0.336351, 0.847684, 0.434513, 0.00323146, 0.344943, 0.598481, 0.833243, 0.233892, 0.675476, 0.48295, 0.481936, 0.304956, 0.712087, 0.182556, 0.621823, 0.0408643, 0.413984, 0.695984, 0.673936, 0.63764, 0.347116, 0.184622, 0.609106, 0.627158, 0.730729, 0.328374, 0.740438, 0.202213, 0.920914, 0.684757, 0.65313, 0.257265, 0.532441, 0.0876436, 0.260497, 0.877384, 0.686125, 0.0937402, 0.111276, 0.361601, 0.57669, 0.593211, 0.666557, 0.288778, 0.775767, 0.288379, 0.329642, 0.189751, 0.984363, 0.00357857, 0.827391, 0.331479, 0.188201, 0.436497, 0.958637, 0.91893, 0.764871, 0.699075, 0.121143, 0.685786, 0.383832, 0.774274, 0.943051, 0.916273, 0.861917, 0.203548, 0.793657, 0.548042, 0.297288, 0.904932, 0.909643, 0.873979, 0.498144, 0.5762, 0.162757, 0.273911, 0.864579, 0.492399, 0.463662, 0.848942, 0.495977, 0.291053, 0.180421, 0.684178, 0.72755, 0.139058, 0.603109, 0.492422, 0.838134, 0.724252, 0.178208, 0.221966, 0.498525, 0.121259, 0.138238, 0.360443, 0.324807, 0.931895, 0.908485, 0.622095, 0.836828, 0.818128, 0.496074, 0.334972, 0.394327, 0.658831, 0.608883, 0.258906, 0.15123, 0.072545, 0.107848, 0.647207, 0.363598, 0.28827, 0.331386, 0.0911486, 0.427328, 0.934495, 0.58357, 0.265461, 0.658747, 0.761778, 0.487427, 0.157272, 0.883037, 0.625665, 0.517715, 0.207844, 0.557561, 0.426199, 0.829939, 0.394388, 0.244327, 0.326013, 0.72936, 0.638654, 0.984845, 0.338243, 0.89756, 0.136075, 0.410788, 0.00540855, 0.783282, 0.774386, 0.293678, 0.114668, 0.865535, 0.721006, 0.0491625, 0.449105, 0.986467, 0.707909, 0.210883, 0.473894, 0.865181, 0.0939195, 0.0995593, 0.382896, 0.301763, 0.65712, 0.809095, 0.131702, 0.0515083, 0.0534223, 0.457716, 0.780868, 0.692076, 
0.44256, 0.119111, 0.589637, 0.578635, 0.529899, 0.595045, 0.361917, 0.304285, 0.888723, 0.476585, 0.16982, 0.609729, 0.525747, 0.618925, 0.596196, 0.233656, 0.829808, 0.0700902, 0.0988374, 0.923728, 0.169649, 0.481733, 0.225491, 0.826769, 0.290829, 0.357193, 0.878278, 0.344251, 0.814909, 0.659146, 0.0363274, 0.257469, 0.778257, 0.625964, 0.836104, 0.308157, 0.221009, 0.198021, 0.612442, 0.109733, 0.674605, 0.782262, 0.719462, 0.200352, 0.401188, 0.315658, 0.434009, 0.230996, 0.385748, 0.532846, 0.154724, 0.555398, 0.0145793, 0.380215, 0.382167, 0.305408, 0.737408, 0.260445, 0.649659, 0.552316, 0.919591, 0.685986, 0.809785, 0.697848, 0.31195, 0.645889, 0.00600477, 0.53296, 0.84391, 0.618447, 0.642693, 0.518515, 0.400709, 0.362154, 0.718867, 0.801897, 0.677812, 0.152876, 0.0328927, 0.0635606, 0.685722, 0.187616, 0.618958, 0.700301, 0.567831, 0.00112548, 0.00570914, 0.305239, 0.26157, 0.655368, 0.857555, 0.181161, 0.341354, 0.667341, 0.879009, 0.653305, 0.31323, 0.885014, 0.186265, 0.157139, 0.503461, 0.828957, 0.675654, 0.90417, 0.191112, 0.394521, 0.706067, 0.868924, 0.547397, 0.738959, 0.932485, 0.233119, 0.926576, 0.551443, 0.93342, 0.494407, 0.552568, 0.939129, 0.799646, 0.814139, 0.594497, 0.657201, 0.9953, 0.935852, 0.324541, 0.874309, 0.589157, 0.637771, 0.759324, 0.775421, 0.79491, 0.262785, 0.604379, 0.470564, 0.166955, 0.79549, 0.865085, 0.873021, 0.664414, 0.412483, 0.611981, 0.596899, 0.645602, 0.538557, 0.148342, 0.579022, 0.0329634, 0.70091, 0.518151, 0.832609, 0.515049, 0.112648, 0.48981, 0.510349, 0.0484997, 0.814351, 0.384658, 0.637656, 0.452122, 0.143982, 0.413078, 0.247033, 0.406767, 0.0174566, 0.717597, 0.573721, 0.812947, 0.582682, 0.446743, 0.477361, 0.995165, 0.0587232, 0.0742604, 0.640766, 0.59728, 0.222602, 0.219788, 0.630243, 0.923513, 0.737939, 0.462852, 0.438562, 0.850586, 0.952662, 0.948911, 0.899086, 0.767014, 0.333569, 0.536743, 0.219136, 0.477551, 0.94982, 0.466169, 0.884318, 0.967277, 0.183765, 0.458039, 0.780224, 0.766448, 0.904782, 
0.257585, 0.761612, 0.963505, 0.331846, 0.402379, 0.560785, 0.554448, 0.622167, 0.191028, 0.477961, 0.360105, 0.65388, 0.916523, 0.210692, 0.606542, 0.865434, 0.109778, 0.373556, 0.199003, 0.64652, 0.592692, 0.676554, 0.596341, 0.0588605, 0.560872, 0.563617, 0.242626, 0.0189108, 0.343841, 0.00907344, 0.923692, 0.601427, 0.770686, 0.887197, 0.933273, 0.173065, 0.447982, 0.487721, 0.795231, 0.639009, 0.965682, 0.155336, 0.292889, 0.882204, 0.366028, 0.899431, 0.747638, 0.475806, 0.272987, 0.94664, 0.122326, 0.865679, 0.623194, 0.718666, 0.92454, 0.184066, 0.282284, 0.167165, 0.202977, 0.626125, 0.176239, 0.126669, 0.227552, 0.946925, 0.0138663, 0.160824, 0.119989, 0.461848, 0.648545, 0.915221, 0.100857, 0.614227, 0.070557, 0.393746, 0.496431, 0.436585, 0.293177, 0.244069, 0.912391, 0.566164, 0.190709, 0.0347164, 0.431844, 0.813904, 0.753383, 0.356383, 0.99797, 0.0356664, 0.523548, 0.200947, 0.661792, 0.699787, 0.327616, 0.889343, 0.646712, 0.341482, 0.0501679, 0.766701, 0.80333, 0.698713, 0.681922, 0.904187, 0.31294, 0.752479, 0.297933, 0.809371, 0.189064, 0.591111, 0.0534394, 0.101454, 0.157275, 0.244149, 0.136171, 0.589119, 0.0580523, 0.889553, 0.945502, 0.0560222, 0.92522, 0.46905, 0.256969, 0.587011, 0.168837, 0.584585, 0.476355, 0.815549, 0.926068, 0.526523, 0.58225, 0.729398, 0.225236, 0.264172, 0.633585, 0.538175, 0.0166506, 0.931518, 0.347546, 0.205714, 0.522629, 0.400985, 0.307168, 0.679904, 0.645134, 0.443339, 0.269022, 0.703186, 0.332892, 0.214524, 0.759208, 0.258112, 0.683574, 0.0161775, 0.845123, 0.852411, 0.600763, 0.321478, 0.66796, 0.52683, 0.848, 0.25021, 0.256228, 0.0732357, 0.514382, 0.889813, 0.611411, 0.531033, 0.821331, 0.958957, 0.736747, 0.343959, 0.359942, 0.0439153, 0.0238632, 0.0050762, 0.487254, 0.292886, 0.708262, 0.820146, 0.50741, 0.467471, 0.0782579, 0.190984, 0.483648, 0.923381, 0.0433947, 0.084411, 0.244858, 0.711355, 0.611241, 0.0928584, 0.961565, 0.867469, 0.166094, 0.475947, 0.757282, 0.777505, 0.00698012, 0.578613, 0.736462, 
0.743727, 0.922572, 0.0964041, 0.787642, 0.946435, 0.10148, 0.274897, 0.239321, 0.809743, 0.0950428, 0.74673, 0.277214, 0.173301, 0.937714, 0.760862, 0.0966814, 0.981109, 0.845273, 0.34154, 0.692463, 0.456514, 0.434398, 0.654029, 0.323983, 0.600492, 0.129976, 0.081265, 0.377997, 0.136956, 0.659878, 0.114459, 0.880683, 0.58245, 0.210863, 0.668326, 0.528885, 0.312343, 0.943222, 0.768206, 0.122086, 0.0382648, 0.514936, 0.3993, 0.211565, 0.45265, 0.160162, 0.308247, 0.433758, 0.00543489, 0.649787, 0.126222, 0.461949, 0.0841846, 0.78025, 0.785932, 0.684677, 0.910227, 0.867197, 0.0626739, 0.0471826, 0.527075, 0.177133, 0.927866, 0.109525, 0.387996, 0.596191, 0.638409, 0.70034, 0.539413, 0.406615, 0.822426, 0.577678, 0.921551, 0.221726, 0.789244, 0.374201, 0.381888, 0.0974906, 0.807959, 0.387323, 0.747277, 0.934181, 0.849272, 0.831462, 0.714432, 0.635204, 0.516139, 0.624658, 0.502401, 0.578813, 0.671841, 0.0294762, 0.755946, 0.599707, 0.139001, 0.143942, 0.195898, 0.77741, 0.844281, 0.735311, 0.184025, 0.666707, 0.31299, 0.105576, 0.888433, 0.102233, 0.479777, 0.270321, 0.199724, 0.287736, 0.657643, 0.947001, 0.221918, 0.506915, 0.778463, 0.936349, 0.142119, 0.294601, 0.561007, 0.64452, 0.873414, 0.232848, 0.673996, 0.629359, 0.832555, 0.812997, 0.773301, 0.0284525, 0.590407, 0.617582, 0.763764, 0.774432, 0.284289, 0.0767534, 0.880009, 0.172722, 0.178987, 0.359786, 0.443043, 0.37871, 0.647522, 0.100686, 0.325711, 0.86944, 0.6076, 0.104174, 0.805789, 0.749719, 0.398775, 0.366796, 0.394239, 0.272189, 0.599644, 0.0682348, 0.901549, 0.432199, 0.881232, 0.67485, 0.460652, 0.471639, 0.292432, 0.224415, 0.246071, 0.576721, 0.301169, 0.12608, 0.749443, 0.480155, 0.485866, 0.192486, 0.858866, 0.133388, 0.293171, 0.184577, 0.00282779, 0.900772, 0.288752, 0.808617, 0.650491, 0.687527, 0.175413, 0.0447295, 0.959716, 0.775058, 0.112964, 0.861265, 0.207257, 0.994196, 0.536115, 0.667908, 0.465835, 0.828546, 0.892324, 0.711906, 0.405267, 0.193493, 0.837986, 0.154711, 0.673648, 0.323852, 
0.347196, 0.532514, 0.45724, 0.640368, 0.717092, 0.460067, 0.54114, 0.00584319, 0.268684, 0.19163, 0.69337, 0.444097, 0.23636, 0.653087, 0.219155, 0.349324, 0.514352, 0.426412, 0.34352, 0.0504663, 0.0943199, 0.809355, 0.879013, 0.986644, 0.521261, 0.28428
]
class TestToppSample(op_test.OpTest):
    """Tests for ToppsampleOperation (top-p / nucleus sampling).

    Each test feeds the operator a (batch, vocab) matrix of cumulative
    softmax probabilities plus a per-row top-p threshold, and compares the
    NPU outputs against the CPU reference in golden_calc.
    """

    def golden_calc(self, in_tensors):
        """CPU reference for the top-p sampling operator.

        Args:
            in_tensors: [cumsumed, topp] where cumsumed is a (batch, vocab)
                tensor of cumulative probabilities (non-decreasing along the
                last axis) and topp is a (batch, 1) top-p threshold per row.

        Returns:
            [res_index, res_select_range] — int32 tensors of shape (batch, 1):
            the sampled token index and the size of the top-p candidate set.
        """
        cumsumed = np.array(in_tensors[0].cpu().float()).astype(np.float32)
        topp = np.array(in_tensors[1].cpu().float()).astype(np.float32)
        # Candidate-set size: how many cumulative probs lie strictly below
        # the top-p threshold.
        res_select_range = np.sum(cumsumed < topp, axis=-1, keepdims=True)
        # Index of the last candidate; clamp to 0 when the set is empty
        # (i.e. when the count is 0 the index would be -1).
        last_idx = np.maximum(res_select_range - 1, 0)
        # Cumulative probability mass at the last candidate.
        topp_v = np.take_along_axis(cumsumed, last_idx, axis=-1)
        # Deterministic per-row "random" draw, cycling through the 512
        # reference values (vectorized form of the original modulo loop).
        batch = cumsumed.shape[0]
        row_rand = np.array(rand)[np.arange(batch) % 512].reshape(-1, 1)
        # Scale the candidate mass by the draw and count how many cumulative
        # probs fall below it — that count is the sampled index.
        scaled_threshold = row_rand * topp_v
        res_index = np.sum(cumsumed < scaled_threshold, axis=-1, keepdims=True)
        res_index[res_index < 0] = 0  # defensive; a sum of bools is >= 0

        return [torch.tensor(res_index).int(), torch.tensor(res_select_range).int()]

    def golden_compare(self, out_tensors, golden_out_tensors):
        """Return True when every output is within a 1/128 relative
        tolerance of its golden counterpart (element-wise), else False."""
        if len(out_tensors) != len(golden_out_tensors):
            logging.error(f"Out size [{len(out_tensors)}] not equal to golden size [{len(golden_out_tensors)}]")
            return False
        for out, golden in zip(out_tensors, golden_out_tensors):
            # |out - golden| <= golden / 128 must hold for every element.
            within_tol = np.array(torch.abs(out - golden) <= (1 / 128) * golden)
            if np.sum(within_tol) != out.shape[0]:
                return False
        return True

    def _run_case(self, batch, vocab, use_bf16=False):
        """Build a (batch, vocab) test case and execute the operator.

        Generates softmax probabilities from a fixed seed, cumulative-sums
        them along the vocab axis, and feeds them as float16 (default) or
        bfloat16 together with uniform(0, 1) top-p thresholds.
        """
        out_shape = (batch, 1)
        torch.manual_seed(0)
        probs = torch.nn.Softmax(dim=-1)(torch.randn(batch, vocab).float()).numpy()
        cumsumed = np.cumsum(probs, axis=-1)
        if use_bf16:
            in0 = torch.from_numpy(cumsumed).bfloat16()
            topp = torch.empty(out_shape, dtype=torch.bfloat16).uniform_(0, 1)
        else:
            in0 = torch.from_numpy(cumsumed.astype(np.float16))
            topp = torch.empty(out_shape, dtype=torch.half).uniform_(0, 1)

        self.set_param(OP_NAME, OP_PARAM0)
        self.execute([in0, topp],
                     [torch.zeros(out_shape).int(), torch.zeros(out_shape).int()])

    @op_test.skip_310b
    @op_test.skip_910a
    def test_topp_case65535(self):
        self._run_case(512, 65535)

    @op_test.skip_310b
    @op_test.skip_910a
    def test_topp_case254208(self):
        self._run_case(200, 254208)

    @op_test.skip_310b
    @op_test.skip_910a
    def test_topp_case65024(self):
        self._run_case(33, 65024)

    @op_test.skip_310b
    @op_test.skip_910a
    def test_topp_case128000(self):
        self._run_case(330, 128000)

    @op_test.skip_310b
    @op_test.skip_910a
    def test_topp_case32000(self):
        self._run_case(330, 32000)

    @op_test.skip_310b
    @op_test.skip_910a
    def test_topp_case158464(self):
        self._run_case(330, 158464)

    @op_test.skip_310b
    @op_test.skip_910a
    def test_topp_case100352(self):
        self._run_case(512, 100352)

    @op_test.skip_310b
    @op_test.skip_910a
    def test_topp_case_bug_fix_182744(self):
        # Regression case for a batch/vocab combination that once failed.
        self._run_case(249, 182744)

    @op_test.only_910b
    def test_topp_bf16_case65535(self):
        self._run_case(512, 65535, use_bf16=True)

    @op_test.only_910b
    def test_topp_bf16_case254208(self):
        self._run_case(200, 254208, use_bf16=True)

    @op_test.only_910b
    def test_topp_bf16_case65024(self):
        self._run_case(33, 65024, use_bf16=True)

    @op_test.only_910b
    def test_topp_bf16_case128000(self):
        self._run_case(330, 128000, use_bf16=True)

    @op_test.only_910b
    def test_topp_bf16_case32000(self):
        self._run_case(330, 32000, use_bf16=True)

    @op_test.only_910b
    def test_topp_bf16_case158464(self):
        self._run_case(330, 158464, use_bf16=True)

    @op_test.only_910b
    def test_topp_bf16_case100352(self):
        self._run_case(512, 100352, use_bf16=True)

    @op_test.only_910b
    def test_topp_bf16_case_bug_fix_182744(self):
        # Regression case for a batch/vocab combination that once failed.
        self._run_case(249, 182744, use_bf16=True)

# Allow running this test file directly via the unittest runner.
if __name__ == '__main__':
    unittest.main()
