/**
 * Copyright (c) Huawei Technologies Co., Ltd. 2023. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef OP_API_INC_LEVEL0_FLASH_ATTENTION_SCORE_GRAD_H_
#define OP_API_INC_LEVEL0_FLASH_ATTENTION_SCORE_GRAD_H_

#include <array>
#include <cstddef>

#include "opdev/op_executor.h"

namespace l0op {

// Maximum number of optional inputs the operator accepts.
// NOTE(review): value 9 is asserted here but not derivable from this header — confirm against the op registration.
constexpr std::size_t MAX_OPTIONAL_CNT = 9;
// Number of gradient tensors returned by FlashAttentionScoreGrad (the extent of the returned std::array).
// Presumably dQuery/dKey/dValue/dPse — confirm against the kernel definition.
constexpr std::size_t MAX_FAG_OUTPUT_CNT = 4;

/**
 * @brief Level-0 declaration of the FlashAttentionScoreGrad operator launcher
 *        (backward pass of fused/flash attention). The definition lives in the
 *        corresponding .cpp; this header only fixes the calling contract.
 *
 * @param query / key / value / dy   Forward inputs and the upstream gradient.
 * @param pseShiftOptional ... attentionInOptional
 *        Optional tensors saved from the forward pass; pass nullptr when absent.
 * @param prefixOptional, actualSeqQLenOptional, actualSeqKvLenOptional
 *        Optional integer arrays (e.g. variable-length sequence metadata); nullptr when absent.
 * @param scaleValueOptional, keepProbOptional
 *        Scalar attributes (softmax scale, dropout keep probability).
 * @param preTockensOptional, nextTockensOptional
 *        Sparse-attention window attributes. "Tockens" looks like a typo for
 *        "Tokens", but the spelling must match the out-of-line definition, so it is kept.
 * @param headNum        Number of attention heads.
 * @param inputLayout    Layout string (e.g. "BSH"/"BNSD"); not const-qualified
 *        because the definition elsewhere uses char* — do not change here alone.
 * @param innerPreciseOptional, sparseModeOptional   Integer mode attributes.
 * @param executor       Executor the launch is recorded on; non-owning.
 * @return Fixed-size array of MAX_FAG_OUTPUT_CNT output tensor pointers.
 *         The top-level const on the return type is part of the existing
 *         declaration and is preserved to match the definition.
 */
const std::array<const aclTensor*, MAX_FAG_OUTPUT_CNT> FlashAttentionScoreGrad(
    const aclTensor* query, const aclTensor* key, const aclTensor* value, const aclTensor* dy,
    const aclTensor* pseShiftOptional, const aclTensor* dropMaskOptional, const aclTensor* paddingMaskOptional,
    const aclTensor* attenMaskOptional, const aclTensor* softmaxMaxOptional, const aclTensor* softmaxSumOptional,
    const aclTensor* softmaxInOptional, const aclTensor* attentionInOptional, const aclIntArray* prefixOptional,
    const aclIntArray* actualSeqQLenOptional, const aclIntArray* actualSeqKvLenOptional, double scaleValueOptional,
    double keepProbOptional, int64_t preTockensOptional, int64_t nextTockensOptional, int64_t headNum,
    char* inputLayout, int64_t innerPreciseOptional, int64_t sparseModeOptional, aclOpExecutor* executor);
}  // namespace l0op

#endif  // OP_API_INC_LEVEL0_FLASH_ATTENTION_SCORE_GRAD_H_