/**
 * Copyright (c) Huawei Technologies Co., Ltd. 2023-2024. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*!
 * \file flash_attention_score_grad_tiling_s1s2_bn2.h
 * \brief
 */

#pragma once

#include "flash_attention_score_grad_tiling_common.h"
#include "tiling/tiling_base.h"
#include "tiling/tiling_type.h"
#include "flash_attention_score_grad_tiling_s1s2_bn2_def.h"

namespace optiling {
// Upper bound on the number of cores for which per-core (b, n2) index ranges
// are precomputed below (bN2idxStarts / bN2idxEnds).
constexpr uint32_t CORE_LIST_NUM = 50;

// Scratch parameters accumulated while computing the s1s2/bn2 tiling.
// All members are default-initialized so a freshly declared instance never
// exposes indeterminate values (previously only t1/t2/sumS1S2Product were).
struct TempParamsUs1s2Bbn2 {
  uint32_t dataTypeSize = 0;  // element size in bytes of the input dtype — presumably; confirm against GetShapeAttrsInfo
  uint32_t mask = 0;
  uint32_t queryType = 0;
  uint32_t branch = 0;        // branch selector chosen by DecideBranch()
  uint32_t calcMode = 0;
  // Shape dimensions; presumably batch / kv-head / q-seq / kv-seq / head-dim
  // in flash-attention terms — TODO confirm against the kernel definition.
  uint32_t b = 0;
  uint32_t n2 = 0;
  uint32_t s1 = 0;
  uint32_t s2 = 0;
  uint32_t d = 0;
  uint32_t layout = 0;        // encoded input layout, filled by GetLayoutInfo()
  // Per-core start/end indices of the flattened (b, n2) work split.
  uint32_t bN2idxStarts[CORE_LIST_NUM] = {};
  uint32_t bN2idxEnds[CORE_LIST_NUM] = {};
  // Actual per-batch sequence lengths for unpadded (TND-style) inputs —
  // presumably; verify against the callers that populate them.
  uint32_t actualSeqQlen[UNPAD_S1S2_BATCH_MAX_SIZE] = {};
  uint32_t actualSeqKvlen[UNPAD_S1S2_BATCH_MAX_SIZE] = {};
  uint32_t t1 = 0;
  uint32_t t2 = 0;
  // Workload balancing data consumed by GetSplitArrayMinMaxSum /
  // CheckForDichotomy: per-item s1*s2 weights and their running sum.
  int64_t sumS1S2Product = 0;
  std::vector<int64_t> s1s2Weight;
  PseConfig pse_cfg = PseConfig::NO_PSE;
  AttenMaskConfig atten_mask_cfg = AttenMaskConfig::NO_ATTEN_MASK;
  DropOutConfig drop_out_cfg = DropOutConfig::NO_DROP_OUT;
};

// Tiling strategy for the flash-attention-score-grad operator using the
// s1s2/bn2 split scheme. Implements the TilingBaseClass template-method
// hooks (IsCapable .. PostTiling); method bodies live in the matching .cpp.
class FlashAttentionScoreGradTilingUs1s2Bbn2 : public TilingBaseClass {
 public:
  explicit FlashAttentionScoreGradTilingUs1s2Bbn2(gert::TilingContext* context_) : TilingBaseClass(context_) {
  }
  ~FlashAttentionScoreGradTilingUs1s2Bbn2() {
  }

  // Tiling data blob serialized back to the runtime in PostTiling() —
  // presumably; public so subclasses/tests can inspect it.
  FlashAttentionScoreGradTilingDataUs1s2Bbn2 td_;

 protected:
  // TilingBaseClass pipeline overrides, invoked in the base class's order.
  bool IsCapable() override;
  ge::graphStatus GetPlatformInfo() override;
  ge::graphStatus GetShapeAttrsInfo() override;
  ge::graphStatus DoOpTiling() override;
  ge::graphStatus DoLibApiTiling() override;
  uint64_t GetTilingKey() const override;
  ge::graphStatus GetWorkspaceSize() override;
  ge::graphStatus PostTiling() override;

 private:
  // --- Input parsing / validation helpers ---
  ge::graphStatus GetLayoutInfo();
  ge::graphStatus GetBaseShapeInfo();
  ge::graphStatus CheckOutOfTokens(const uint32_t s1, const uint32_t s2);
  ge::graphStatus CheckTokens();
  ge::graphStatus GetAttenMaskInfo();
  ge::graphStatus GetPseInfo();
  ge::graphStatus CheckAttenMaskShape();
  // --- Tiling computation helpers ---
  ge::graphStatus DoBlockTiling();
  ge::graphStatus DoCastTiling();
  ge::graphStatus DropoutTiling();
  // Matmul tiling setters; l1SizeRemain is presumably the L1 buffer budget
  // left for the matmul — confirm in the .cpp.
  ge::graphStatus SetBmm1TilingData(uint32_t sOut, uint32_t sFla, uint32_t l1SizeRemain);
  ge::graphStatus SetBmm31TilingData(uint32_t sOut, uint32_t l1SizeRemain);
  ge::graphStatus SetBmm4TilingData(uint32_t sOut, uint32_t sFla, uint32_t l1SizeRemain);
  ge::graphStatus SetBaseInfo(const gert::Shape& queryShape, const gert::Shape& keyShape, uint32_t dimN1);
  ge::graphStatus SetMaskShapeType(const gert::Shape& storageShape, const uint32_t maskShapeDims);
  // --- Base block (M/N/D) selection strategies ---
  void DecideBranch();
  void NMDStrategy();
  void DecideBaseMND();
  bool ProcessPrefix();
  // --- Sparse-attention handling ---
  ge::graphStatus SetSparseParams();
  void SetBandIdx();
  bool SparseTokenProcess();
  bool IsModuleOneShape();
  // --- Base M/N/D split variants ---
  void VectorBaseMNSplit();
  void MatmulBaseMNSplit();
  void SFTBaseMDSplit();
  // Binary-search helpers for balancing s1s2 weights across cores:
  // CheckForDichotomy tests whether `nums` fits in m partitions of max-sum x;
  // GetSplitArrayMinMaxSum returns the minimized maximum partition sum.
  bool CheckForDichotomy(std::vector<int64_t>& nums, uint32_t x, uint32_t m);
  int64_t GetSplitArrayMinMaxSum(std::vector<int64_t>& s1s2WeightNums, uint32_t coreNum);

  TempParamsUs1s2Bbn2 tmpData_;  // scratch state shared between the helpers above
  uint32_t tensorSize{0};
  uint32_t dimD{0};
  uint32_t dimS2{0};
  uint32_t dimS1{0};
  // Matmul/vector base-block and single-core tile sizes chosen by the
  // *Split()/Decide*() strategies above.
  uint32_t baseM{0};
  uint32_t baseN{0};
  uint32_t baseMmm{0};
  uint32_t baseNmm{0};
  uint32_t sftBaseM{0};
  uint32_t sftSingleM{0};
  uint32_t singleM{0};
  uint32_t singleN{0};
  uint32_t baseD{0};
  uint32_t s1Ratio{0};
  uint32_t s2Ratio{0};
  uint32_t mmRatio{0};
  bool needSplitD{false};  // true when the D dimension must also be tiled
  // Attention-mask metadata filled by GetAttenMaskInfo()/CheckAttenMaskShape().
  uint32_t attenMaskCompressMode{0};
  uint32_t attenMaskS1Size{0};
  uint32_t attenMaskS2Size{0};
  SparseMode sparseMode = NO_MASK;
};

// Deterministic-mode variant of the s1s2/bn2 tiling: reuses the entire base
// pipeline and only overrides IsCapable() to change when this strategy is
// selected (override body lives in the .cpp).
class FlashAttentionScoreGradTilingDeterministic : public FlashAttentionScoreGradTilingUs1s2Bbn2 {
 public:
  explicit FlashAttentionScoreGradTilingDeterministic(gert::TilingContext* context_)
      : FlashAttentionScoreGradTilingUs1s2Bbn2(context_) {
  }
  ~FlashAttentionScoreGradTilingDeterministic() {
  }

 protected:
  bool IsCapable() override;
};

}  // namespace optiling
