#include <iostream>
#include <string>
#include <algorithm>
#include <iosfwd>
#include "include/api/model.h"
#include "include/api/serialization.h"
#include "include/api/context.h"
#include <gflags/gflags.h>
#include <dirent.h>
#include "utils.h"

using namespace mindspore;

DEFINE_string(mindir_path, "", "mindir path");
DEFINE_string(dataset_path, ".", "dataset path");
DEFINE_int32(device_id, 0, "device id");
DEFINE_string(precision_mode, "", "precision mode");
DEFINE_string(op_select_impl_mode, "", "op select impl mode");

bool Predict(Model &model, const std::vector<MSTensor> &inputs, const std::string &input_data);

int main(int argc, char **argv) {
  // check flags
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  auto model_context = std::make_shared<ModelContext>();
  if (!FLAGS_precision_mode.empty()) {
    ModelContext::SetPrecisionMode(model_context, FLAGS_precision_mode);
  }
  if (!FLAGS_op_select_impl_mode.empty()) {
    ModelContext::SetOpSelectImplMode(model_context, FLAGS_op_select_impl_mode);
  }
  if (RealPath(FLAGS_mindir_path).empty()) {
    std::cout << "Invalid mindir" << std::endl;
    return 1;
  }

  GlobalContext::SetGlobalDeviceTarget("Ascend310");
  GlobalContext::SetGlobalDeviceID(FLAGS_device_id);
  auto graph = Serialization::LoadModel(FLAGS_mindir_path, ModelType::kMindIR);
  Model model(GraphCell(graph), model_context);
  Status ret = model.Build();
  if (ret != kSuccess) {
    std::cout << "EEEEEEEERROR Build failed." << std::endl;
    return 1;
  }

  // get model inputs info
  auto model_inputs = model.GetInputs();
  std::cout << "Model has " << model_inputs.size() << " inputs: " << std::endl;
  for (auto in : model_inputs) {
    std::cout << "Name: " << in.Name() << " memory size: " << in.DataSize() << " DataType: "
              << static_cast<int>(in.DataType()) << std::endl;
  }

  // prepare input
  auto all_files = GetAllInputData(FLAGS_dataset_path);
  if (all_files.empty()) {
    std::cout << "ERROR: no input data." << std::endl;
    return 1;
  }

  size_t file_num = all_files[0].size();
  for (const auto &in_files : all_files) {
    if (in_files.size() != file_num || all_files.size() != model_inputs.size()) {
      std::cout << "ERROR: invalid file numbers of input." << std::endl;
      return 1;
    }
  }

  // infer
  for (size_t i = 0; i < file_num; ++i) {
    std::vector<Buffer> inputs_data;
    std::vector<MSTensor> inputs;
    std::cout << "Start predict input files:" << std::endl;
    for (size_t j = 0; j < all_files.size(); ++j) {
      std::cout << all_files[j][i] << std::endl;
      inputs_data.push_back(ReadFile(all_files[j][i]));
      inputs.push_back(MSTensor::CreateRefTensor(model_inputs[j].Name(), model_inputs[j].DataType(),
                                                 model_inputs[j].Shape(), inputs_data[j].MutableData(),
                                                 inputs_data[j].DataSize()));
    }

    if (!Predict(model, inputs, all_files[0][i])) {
      std::cout << "Predict " << all_files[0][i] << " failed." << std::endl;
      return 1;
    }
    std::cout << "Predict " << all_files[0][i] << " success." << std::endl;
  }

  return 0;
}

// Runs one inference pass on `inputs` and writes every output tensor to
// disk (named after `input_data`). Returns false if inference fails.
bool Predict(Model &model, const std::vector<MSTensor> &inputs, const std::string &input_data) {
  std::vector<MSTensor> outputs;
  const Status status = model.Predict(inputs, &outputs);
  if (status != kSuccess) {
    std::cout << "ERROR Predict" << std::endl;
    return false;
  }

  // Persist each output tensor, keeping its position as the file index.
  size_t index = 0;
  for (auto &output : outputs) {
    SaveFile(index, output, input_data);
    ++index;
  }
  return true;
}
