/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>


#define protected public
#define private public
#include "graph/utils/tensor_utils.h"
#include "graph/utils/graph_utils.h"
#include "graph/utils/op_desc_utils.h"
#include "graph/utils/attr_utils.h"
#include "common/util/compress/compress.h"
#include "common/configuration.h"
#include "fusion_manager/fusion_manager.h"
#include "graph_optimizer/graph_fusion/fusion_pass_manager/builtin_pass/conv_weight_compress_fusion_pass.h"
#include "common/pass_manager.h"
#include "graph_optimizer/fusion_common/fusion_pass_manager.h"
#undef protected
#undef private

using namespace std;
using namespace ge;
using namespace fe;

#define KERNEL_NUM  2

/**
 * @ingroup dnn
 * @brief mode of convolution
 */
enum tag_cc_convolution_mode
{
    CC_CONV_CONVOLUTION = 0,        /**< mathematical convolution */
    CC_CONV_CROSS_CORRELATION = 1,  /**< cross-correlation convolution */
    CC_CONV_DECONVOLUTION = 2,      /**< deconvolution, also named transposed convolution */
    CC_CONV_MODE_DEPTHWISE = 3,     /**< depthwise convolution */
    CC_CONV_MODE_RESERVED = 4       /**< sentinel marking the end of valid modes */
};
using ccConvolutionMode_t = tag_cc_convolution_mode;

/**
  * @ingroup dnn
  * @brief mode of padding
  */
enum tag_cc_padding_mode
{
    CC_PADDING_CEIL = 0,          /**< default padding mode; same as caffe and MxNet "full" */
    CC_PADDING_DIRECTASSIGN = 1,  /**< same as caffe2 default padding mode NOTSET */
    CC_PADDING_VALID = 2,         /**< same as tensorflow VALID and MxNet "valid" */
    CC_PADDING_SAME = 3,          /**< padding values of 0 are always used */
    CC_PADDING_CEIL_NEW = 4,      /**< new ceil; kept for backward compatibility */
    CC_PADDING_VALID_NEW = 5,     /**< new valid; kept for backward compatibility */
    CC_PADDING_SAME_NEW = 6,      /**< new same; kept for backward compatibility */
    CC_PADDING_RESERVED = 7       /**< sentinel marking the end of valid modes */
};
using ccPaddingMode_t = tag_cc_padding_mode;


class fusion_pass_conv_weight_compress_st : public testing::Test
{
public:
  FEGraphOptimizerPtr graph_optimizer_ptr;
protected:
    void SetUp()
    {
      OpStoreAdapterManagerPtr op_store_adapter_manager = make_shared<OpStoreAdapterManager>();
      FEOpsKernelInfoStorePtr ops_kernel_info_store = make_shared<FEOpsKernelInfoStore>(op_store_adapter_manager, AI_CORE_NAME);
      graph_optimizer_ptr = make_shared<FEGraphOptimizer>(ops_kernel_info_store, op_store_adapter_manager, AI_CORE_NAME);
    }
    void TearDown()
    {

    }

protected:
  static void InitConvOp(NodePtr node)
  {
    int8_t sample_conv_weight[KERNEL_NUM][2][2][2]=
                {
                    {
                        {{1,2},{3,4}},
                        {{4,3},{2,1}}
                    },
                    {
                        {{2,1},{4,3}},
                        {{3,4},{1,2}}
                    }
                };

    int32_t sample_conv_bias[KERNEL_NUM] =
                {
                    1,3
                };
    vector<GeTensorPtr> conv_weights = OpDescUtils::MutableWeights(node);

    vector<int64_t> dim(4, 2);
    dim[0] = KERNEL_NUM;
    GeShape shape(dim);
    GeTensorDesc out_desc(shape);
    TensorUtils::SetDataOffset(out_desc, 0);

    GeTensorPtr filter = std::make_shared<ge::GeTensor>(out_desc, (uint8_t *)sample_conv_weight, KERNEL_NUM * 2 * 2 * 2 * sizeof(int8_t));

    vector<int64_t> dim_bias(2, 1);
    dim_bias[1] = KERNEL_NUM;
    GeTensorDesc out_desc_bias(shape);
    TensorUtils::SetDataOffset(out_desc_bias, 0);
    GeTensorPtr bias = std::make_shared<ge::GeTensor>(out_desc_bias, (uint8_t *)sample_conv_bias, 2 * sizeof(int32_t));

    conv_weights.push_back(filter);
    conv_weights.push_back(bias);
    OpDescUtils::SetWeights(node, conv_weights);

    AttrUtils::SetInt(node->GetOpDesc(), CONV_ATTR_NAME_MODE, CC_CONV_CONVOLUTION);
    AttrUtils::SetInt(node->GetOpDesc(), CONV_ATTR_NAME_GROUP, 1);
    AttrUtils::SetInt(node->GetOpDesc(), CONV_ATTR_NAME_PAD_MODE, CC_PADDING_VALID);
    AttrUtils::SetInt(node->GetOpDesc(), CONV_ATTR_NAME_ALGO, -1);

    vector<int64_t> pad(4, 1);
    AttrUtils::SetListInt(node->GetOpDesc(), CONV_ATTR_NAME_PAD, pad);

    vector<int64_t> stride(2, 2);
    AttrUtils::SetListInt(node->GetOpDesc(), CONV_ATTR_NAME_STRIDE, stride);

    vector<string> input_name_vec;
    input_name_vec.push_back("x");
    input_name_vec.push_back("filter");
    input_name_vec.push_back("bias");
    node->GetOpDesc()->SetInputName(input_name_vec);
  }

  static void InitQuantOp(NodePtr node)
  {
    AttrUtils::SetFloat(node->GetOpDesc(), "scale", 1.1);
    AttrUtils::SetFloat(node->GetOpDesc(), "offset", 1.2);
  }

  static void InitDequantOp(NodePtr node)
  {
    uint64_t sample_deq_scale[KERNEL_NUM] = {0x00001100392BCD31,
                                             0x000022003717AB06};
    vector<GeTensorPtr> scale_weights = OpDescUtils::MutableWeights(node);

    vector<int64_t> dim{KERNEL_NUM};
    GeShape shape(dim);
    GeTensorDesc out_desc(shape);
    TensorUtils::SetDataOffset(out_desc, 0);

    GeTensorPtr scale_weight = std::make_shared<ge::GeTensor>(out_desc, (uint8_t *)sample_deq_scale, KERNEL_NUM * sizeof(uint64_t));

    scale_weights.push_back(scale_weight);
    OpDescUtils::SetWeights(node, scale_weights);
  }

  static ComputeGraphPtr CreateGraphWithOneConv(DataType data_type)
  {
      ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test");
      OpDescPtr op_desc_data = std::make_shared<OpDesc>("data", "Data");
      OpDescPtr op_desc_quant = std::make_shared<OpDesc>("A", "AscendQuant");
      OpDescPtr op_desc_conv = std::make_shared<OpDesc>("B", "Conv2D");
      OpDescPtr op_desc_dequant = std::make_shared<OpDesc>("C", "AscendDequant");
      OpDescPtr op_desc_relu = std::make_shared<OpDesc>("D", "Relu");

      //add descriptor
      vector<int64_t> dim(4, 4);
      GeShape shape(dim);
      GeTensorDesc out_desc(shape);
      out_desc.SetFormat(FORMAT_NCHW);
      out_desc.SetOriginFormat(FORMAT_NCHW);
      out_desc.SetDataType(data_type);
      out_desc.SetOriginDataType(data_type);

      op_desc_data->AddOutputDesc(out_desc);
      op_desc_quant->AddInputDesc(out_desc);
      op_desc_quant->AddOutputDesc(out_desc);
      op_desc_conv->AddInputDesc(out_desc);
      op_desc_conv->AddOutputDesc(out_desc);
      op_desc_dequant->AddInputDesc(out_desc);
      op_desc_dequant->AddOutputDesc(out_desc);
      op_desc_relu->AddInputDesc(out_desc);

      NodePtr node_data = graph->AddNode(op_desc_data);
      NodePtr node_quant = graph->AddNode(op_desc_quant);
      NodePtr node_conv = graph->AddNode(op_desc_conv);
      NodePtr node_dequant = graph->AddNode(op_desc_dequant);
      NodePtr node_relu = graph->AddNode(op_desc_relu);

      InitConvOp(node_conv);
      AttrUtils::SetBool(node_conv->GetOpDesc(), ATTR_NAME_COMPRESS_WEIGHT, true);

      InitQuantOp(node_quant);
      InitDequantOp(node_dequant);

      GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_quant->GetInDataAnchor(0));
      GraphUtils::AddEdge(node_quant->GetOutDataAnchor(0), node_conv->GetInDataAnchor(0));
      GraphUtils::AddEdge(node_conv->GetOutDataAnchor(0), node_dequant->GetInDataAnchor(0));
      GraphUtils::AddEdge(node_dequant->GetOutDataAnchor(0), node_relu->GetInDataAnchor(0));

      return graph;
  }

  static ComputeGraphPtr CreateGraphWithOneConvCompress()
  {
    ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test");
    OpDescPtr op_desc_data = std::make_shared<OpDesc>("data", "Data");
    OpDescPtr op_desc_quant = std::make_shared<OpDesc>("quant", "AscendQuant");
    OpDescPtr op_desc_conv = std::make_shared<OpDesc>("conv_compress", "Conv2DCompress");
    OpDescPtr op_desc_dequant = std::make_shared<OpDesc>("dequant", "AscendDequant");
    OpDescPtr op_desc_relu = std::make_shared<OpDesc>("relu", "Relu");
    OpDescPtr op_desc_const = std::make_shared<OpDesc>("const", "Const");

    //add descriptor
    vector<int64_t> dim(4, 4);
    GeShape shape(dim);
    GeTensorDesc out_desc(shape);
    out_desc.SetFormat(FORMAT_NCHW);
    out_desc.SetOriginFormat(FORMAT_NCHW);
    out_desc.SetDataType(DT_INT8);
    out_desc.SetOriginDataType(DT_INT8);

    op_desc_data->AddOutputDesc(out_desc);
    op_desc_quant->AddInputDesc(out_desc);
    op_desc_quant->AddOutputDesc(out_desc);
    op_desc_conv->AddInputDesc(out_desc);
    op_desc_conv->AddInputDesc(out_desc);
    op_desc_conv->AddInputDesc(out_desc);
    op_desc_conv->AddInputDesc(out_desc);
    op_desc_conv->AddOutputDesc(out_desc);
    op_desc_dequant->AddInputDesc(out_desc);
    op_desc_dequant->AddOutputDesc(out_desc);
    op_desc_relu->AddInputDesc(out_desc);
    op_desc_const->AddOutputDesc(out_desc);

    vector<string> input_name_vec;
    input_name_vec.push_back("x");
    input_name_vec.push_back("filter");
    input_name_vec.push_back("bias");
    op_desc_conv->SetInputName(input_name_vec);

    NodePtr node_data = graph->AddNode(op_desc_data);
    NodePtr node_quant = graph->AddNode(op_desc_quant);
    NodePtr node_conv = graph->AddNode(op_desc_conv);
    NodePtr node_dequant = graph->AddNode(op_desc_dequant);
    NodePtr node_relu = graph->AddNode(op_desc_relu);
    NodePtr node_const = graph->AddNode(op_desc_const);

    AttrUtils::SetBool(node_conv->GetOpDesc(), ATTR_NAME_COMPRESS_WEIGHT, true);

    GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_quant->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_quant->GetOutDataAnchor(0), node_conv->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_const->GetOutDataAnchor(0), node_conv->GetInDataAnchor(1));
    GraphUtils::AddEdge(node_conv->GetOutDataAnchor(0), node_dequant->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_dequant->GetOutDataAnchor(0), node_relu->GetInDataAnchor(0));

    return graph;
  }

  static ComputeGraphPtr CreateGraphWithTwoConv()
  {
    ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test2");

    OpDescPtr op_desc_data = std::make_shared<OpDesc>("data", "Data");
    OpDescPtr op_desc_quant = std::make_shared<OpDesc>("ascend_quant", "AscendQuant");
    OpDescPtr op_desc_conv1 = std::make_shared<OpDesc>("conv", "Conv2D");
    OpDescPtr op_desc_dequant1 = std::make_shared<OpDesc>("ascend_dequant", "AscendDequant");
    OpDescPtr op_desc_relu1 = std::make_shared<OpDesc>("relu", "Relu");

    OpDescPtr op_desc_conv2 = std::make_shared<OpDesc>("another_conv", "Conv2D");
    OpDescPtr op_desc_dequant2 = std::make_shared<OpDesc>("another_dequant", "AscendDequant");
    OpDescPtr op_desc_relu2 = std::make_shared<OpDesc>("another_relu", "Relu");

    //add descriptor
    vector<int64_t> dim(4, 4);
    GeShape shape(dim);
    GeTensorDesc out_desc(shape);
    out_desc.SetFormat(FORMAT_NCHW);
    out_desc.SetOriginFormat(FORMAT_NCHW);
    out_desc.SetDataType(DT_INT8);
    out_desc.SetOriginDataType(DT_INT8);

    op_desc_data->AddOutputDesc(out_desc);

    op_desc_quant->AddInputDesc(out_desc);
    op_desc_quant->AddOutputDesc(out_desc);

    op_desc_conv1->AddInputDesc(out_desc);
    op_desc_conv1->AddOutputDesc(out_desc);

    op_desc_dequant1->AddInputDesc(out_desc);
    op_desc_dequant1->AddOutputDesc(out_desc);

    op_desc_relu1->AddInputDesc(out_desc);

    op_desc_conv2->AddInputDesc(out_desc);
    op_desc_conv2->AddOutputDesc(out_desc);

    op_desc_dequant2->AddInputDesc(out_desc);
    op_desc_dequant2->AddOutputDesc(out_desc);

    op_desc_relu2->AddInputDesc(out_desc);

    NodePtr node_data = graph->AddNode(op_desc_data);
    NodePtr node_quant = graph->AddNode(op_desc_quant);
    NodePtr node_conv1 = graph->AddNode(op_desc_conv1);
    NodePtr node_dequant1 = graph->AddNode(op_desc_dequant1);
    NodePtr node_relu1 = graph->AddNode(op_desc_relu1);

    NodePtr node_conv2 = graph->AddNode(op_desc_conv2);
    NodePtr node_dequant2 = graph->AddNode(op_desc_dequant2);
    NodePtr node_relu2 = graph->AddNode(op_desc_relu2);

    InitQuantOp(node_quant);
    InitConvOp(node_conv1);
    AttrUtils::SetBool(node_conv1->GetOpDesc(), ATTR_NAME_COMPRESS_WEIGHT, true);

    InitDequantOp(node_dequant1);
    InitConvOp(node_conv2);
    InitDequantOp(node_dequant2);

    // connect edge
    GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_quant->GetInDataAnchor(0));

    GraphUtils::AddEdge(node_quant->GetOutDataAnchor(0), node_conv1->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_conv1->GetOutDataAnchor(0), node_dequant1->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_dequant1->GetOutDataAnchor(0), node_relu1->GetInDataAnchor(0));

    GraphUtils::AddEdge(node_quant->GetOutDataAnchor(0), node_conv2->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_conv2->GetOutDataAnchor(0), node_dequant2->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_dequant2->GetOutDataAnchor(0), node_relu2->GetInDataAnchor(0));
    return graph;
  }

  static ComputeGraphPtr CreateGraphWithOtherTwoConv()
  {
    ComputeGraphPtr graph = std::make_shared<ComputeGraph>("test2");

    OpDescPtr op_desc_data = std::make_shared<OpDesc>("data", "Data");
    OpDescPtr op_desc_quant = std::make_shared<OpDesc>("ascend_quant", "AscendQuant");
    OpDescPtr op_desc_conv1 = std::make_shared<OpDesc>("conv", "Conv2D");
    OpDescPtr op_desc_dequant1 = std::make_shared<OpDesc>("ascend_dequant", "AscendDequant");
    OpDescPtr op_desc_relu1 = std::make_shared<OpDesc>("relu", "Relu");

    OpDescPtr op_desc_conv2 = std::make_shared<OpDesc>("another_conv", "Conv2D");
    OpDescPtr op_desc_dequant2 = std::make_shared<OpDesc>("another_dequant", "AscendDequant");
    OpDescPtr op_desc_relu2 = std::make_shared<OpDesc>("another_relu", "Relu");

    OpDescPtr op_desc_relu3 = std::make_shared<OpDesc>("relu3", "Relu");

    //add descriptor
    vector<int64_t> dim(4, 4);
    GeShape shape(dim);
    GeTensorDesc out_desc(shape);
    out_desc.SetFormat(FORMAT_NCHW);
    out_desc.SetOriginFormat(FORMAT_NCHW);
    out_desc.SetDataType(DT_INT8);
    out_desc.SetOriginDataType(DT_INT8);

    op_desc_data->AddOutputDesc(out_desc);

    op_desc_quant->AddInputDesc(out_desc);
    op_desc_quant->AddOutputDesc(out_desc);

    op_desc_conv1->AddInputDesc(out_desc);
    op_desc_conv1->AddOutputDesc(out_desc);

    op_desc_dequant1->AddInputDesc(out_desc);
    op_desc_dequant1->AddOutputDesc(out_desc);

    op_desc_relu1->AddInputDesc(out_desc);

    op_desc_conv2->AddInputDesc(out_desc);
    op_desc_conv2->AddOutputDesc(out_desc);

    op_desc_dequant2->AddInputDesc(out_desc);
    op_desc_dequant2->AddOutputDesc(out_desc);

    op_desc_relu2->AddInputDesc(out_desc);

    op_desc_relu3->AddInputDesc(out_desc);
    op_desc_relu3->AddOutputDesc(out_desc);

    NodePtr node_data = graph->AddNode(op_desc_data);
    NodePtr node_quant = graph->AddNode(op_desc_quant);
    NodePtr node_conv1 = graph->AddNode(op_desc_conv1);
    NodePtr node_dequant1 = graph->AddNode(op_desc_dequant1);
    NodePtr node_relu1 = graph->AddNode(op_desc_relu1);

    NodePtr node_conv2 = graph->AddNode(op_desc_conv2);
    NodePtr node_dequant2 = graph->AddNode(op_desc_dequant2);
    NodePtr node_relu2 = graph->AddNode(op_desc_relu2);

    NodePtr node_relu3 = graph->AddNode(op_desc_relu3);

    InitQuantOp(node_quant);
    InitConvOp(node_conv1);
    AttrUtils::SetBool(node_conv1->GetOpDesc(), ATTR_NAME_COMPRESS_WEIGHT, true);
    AttrUtils::SetBool(node_conv2->GetOpDesc(), ATTR_NAME_COMPRESS_WEIGHT, true);

    InitDequantOp(node_dequant1);
    InitConvOp(node_conv2);
    InitDequantOp(node_dequant2);

    // connect edge
    GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_quant->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_quant->GetOutDataAnchor(0), node_conv1->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_conv1->GetOutDataAnchor(0), node_dequant1->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_dequant1->GetOutDataAnchor(0), node_relu1->GetInDataAnchor(0));

    string other_type = "Variable";
    // add relu node between conv and weight
    OutDataAnchorPtr out_data_anchor = node_conv1->GetInDataAnchor(1)->GetPeerOutAnchor();
    NodePtr weight_node = out_data_anchor->GetOwnerNode();
    weight_node->GetOpDesc()->SetType(other_type);
    GraphUtils::RemoveEdge(out_data_anchor, node_conv1->GetInDataAnchor(1));
    GraphUtils::AddEdge(out_data_anchor, node_relu3->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_relu3->GetOutDataAnchor(0), node_conv1->GetInDataAnchor(1));

    GraphUtils::AddEdge(node_quant->GetOutDataAnchor(0), node_conv2->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_conv2->GetOutDataAnchor(0), node_dequant2->GetInDataAnchor(0));
    GraphUtils::AddEdge(node_dequant2->GetOutDataAnchor(0), node_relu2->GetInDataAnchor(0));

    // change op type of weight node
    OutDataAnchorPtr out_data_anchor2 = node_conv2->GetInDataAnchor(1)->GetPeerOutAnchor();
    NodePtr weight_node2 = out_data_anchor2->GetOwnerNode();
    weight_node2->GetOpDesc()->SetType(other_type);
    return graph;
  }
};

// Stub compressor: reports a 30% size reduction and writes marker bytes so
// the caller can verify both output buffers were touched.
CmpStatus CompressWeightsStub1(char* input,
                               const CompressConfig& compress_config,
                               char* indexs,
                               char* output,
                               size_t& compressed_length) {
    (void)input;  // content is irrelevant for the stub
    compressed_length = static_cast<size_t>(compress_config.inputSize * 0.7);
    indexs[0] = '7';
    output[0] = '7';
    return RET_SUCCESS;
}

// Stub compressor: reports no size reduction at all (compressed size equals
// input size) while still writing marker bytes into both output buffers.
CmpStatus CompressWeightsStub2(char* input,
                               const CompressConfig& compress_config,
                               char* indexs,
                               char* output,
                               size_t& compressed_length) {
    (void)input;  // content is irrelevant for the stub
    compressed_length = compress_config.inputSize;
    indexs[0] = '7';
    output[0] = '7';
    return RET_SUCCESS;
}

// Inserting the Compress op into a graph that already holds a Conv2DCompress
// must succeed, create a "Compress" node with the expected endpoint names,
// and extend the conv's input-name list to four entries.
TEST_F(fusion_pass_conv_weight_compress_st, fusion_success_case7)
{
  ComputeGraphPtr graph = CreateGraphWithOneConvCompress();
  size_t size_before = graph->GetDirectNode().size();
  FE_LOGD("The number of nodes before is %zu.", size_before);

  // run the pass that inserts the compress node
  Status status = graph_optimizer_ptr->InsertCompressOP(*graph);
  EXPECT_EQ(fe::SUCCESS, status);

  bool compress_found = false;
  for (auto &node : graph->GetDirectNode()) {
    const string node_type = node->GetType();
    if (node_type == "Compress") {
      compress_found = true;
      OpDescPtr op_desc = node->GetOpDesc();
      // Compress consumes the raw weight and emits the compressed weight
      // plus its index table.
      EXPECT_EQ(op_desc->GetInputNameByIndex(0), "weight");
      EXPECT_EQ(op_desc->GetOutputNameByIndex(0), "weight_compress");
      EXPECT_EQ(op_desc->GetOutputNameByIndex(1), "compress_index");
    } else if (node_type == "Conv2DCompress") {
      // x, filter, bias plus the newly wired compress_index input.
      EXPECT_EQ(node->GetOpDesc()->GetInputName().size(), 4);
    }
  }
  EXPECT_TRUE(compress_found);
}