/**
 * Copyright (c) 2023-2024 Huawei Technologies Co., Ltd.
 *
 * ascendc_ops is licensed under Mulan PSL v2.
 * You can use this software according to the terms and conditions of the Mulan PSL v2.
 * You may obtain a copy of Mulan PSL v2 at:
 *
 * http://license.coscl.org.cn/MulanPSL2
 *
 * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
 * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
 * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 * See the Mulan PSL v2 for more details.
 */

/*!
 * \file flash_attention_score_grad_tiling_unpadded_attension.cc
 * \brief
 */

#include "flash_attention_score_grad_tiling_s1s2_bn2gs1s2.h"
#include "tiling/tiling_templates_registry.h"

namespace optiling {

/**
 * Tiling template for FlashAttentionScoreGrad that handles the "unpadded"
 * (variable-sequence-length) input layout. It reuses the whole
 * S1s2Bn2gs1s2 tiling strategy and only overrides template selection:
 * it is chosen when the optional actual_seq_qlen input is supplied.
 *
 * NOTE(review): the class name keeps the historical spelling "Attension"
 * because REGISTER_TILING_TEMPLATE below (and any external references)
 * use this exact identifier.
 */
class FlashAttentionScoreGradTilingUnpaddedAttension : public FlashAttentionScoreGradTilingS1s2Bn2gs1s2 {
public:
    explicit FlashAttentionScoreGradTilingUnpaddedAttension(gert::TilingContext *context_)
        : FlashAttentionScoreGradTilingS1s2Bn2gs1s2(context_)
    {
    }

    /**
     * Decide whether this template applies to the current tiling context.
     *
     * @return true when the optional actual_seq_qlen tensor is present and
     *         non-empty — presumably indicating per-batch sequence lengths
     *         (unpadded/TND layout); false otherwise.
     */
    bool IsCapable() override
    {
        auto actualSeqQLenTensor = context_->GetOptionalInputTensor(ACTUAL_SEQ_Q_LEN);
        // Both checks are required: the optional input may be absent (nullptr)
        // or supplied as an empty tensor; either means the padded path applies.
        if (actualSeqQLenTensor != nullptr && actualSeqQLenTensor->GetShapeSize() != 0) {
            OPS_LOG_D("FlashAttentionScoreGradTilingUnpaddedAttension hit");
            return true;
        }

        return false;
    } // was "};" — removed the stray semicolon after the function body
};

// Priority 2000: checked after lower-priority templates registered for
// FlashAttentionScoreGrad; selected only when IsCapable() returns true.
REGISTER_TILING_TEMPLATE("FlashAttentionScoreGrad", FlashAttentionScoreGradTilingUnpaddedAttension, 2000);

} // namespace optiling
