#include <dirent.h>

#include <algorithm>
#include <cstdlib>
#include <fstream>
#include <iosfwd>
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <vector>

#include <gflags/gflags.h>

#include "include/api/context.h"
#include "include/api/model.h"
#include "include/api/serialization.h"
#include "minddata/dataset/include/execute.h"
#include "minddata/dataset/include/vision.h"
#include "utils.h"

// NOTE(review): file-scope using-directives are kept for consistency with the
// existing style of this tool; avoid adding more.
using namespace mindspore;
using namespace mindspore::dataset;
using namespace mindspore::dataset::vision;

// Command-line flags for the 310 inference driver.
DEFINE_string(mindir_path, "", "mindir path");      // exported MindIR model file
DEFINE_string(dataset_path, ".", "dataset path");   // directory of input images
DEFINE_int32(device_id, 0, "device id");            // Ascend device index
DEFINE_string(val_txt_path, "", "val txt path");    // "<file> <label>" ground-truth list

/// Returns the argmax index over the tensor's flat float buffer.
///
/// @param data output tensor interpreted as a contiguous float array.
/// @return index of the first maximum element; 0 for an empty/null buffer.
static size_t GetMax(MSTensor data) {
  float *p = static_cast<float *>(data.MutableData());
  size_t n = data.DataSize() / sizeof(float);
  if (p == nullptr || n == 0) {
    // Original code returned size_t(-1) here via its sentinel init; an empty
    // output is unexpected, so 0 is a safe, in-range answer.
    return 0;
  }
  // std::max_element keeps the first occurrence of the maximum, matching the
  // original strict '>' scan — and, unlike the old "max_value = -1" sentinel,
  // it is also correct when every logit is below -1.
  return static_cast<size_t>(std::max_element(p, p + n) - p);
}

/// Looks up the ground-truth label for an image by its file basename.
///
/// @param img full path of the image; the part after the last '/' is the key.
/// @param val basename -> label map parsed from the validation txt file.
/// @return the mapped label, or 0 when the basename is absent.
static size_t GetLabel(const std::string &img, std::map<std::string, size_t> &val) {
  auto pos = img.find_last_of('/');
  // find_last_of returns npos when there is no '/'; npos + 1 == 0, so the
  // whole string is then used as the name.
  std::string name = img.substr(pos + 1);
  // Use find() instead of operator[]: a miss must not insert a default entry
  // into the caller's map while we are iterating the dataset.
  auto it = val.find(name);
  return it != val.end() ? it->second : 0;
}

/// Ascend 310 inference driver: loads a MindIR model, preprocesses every image
/// under --dataset_path, runs prediction, and reports top-1 accuracy against
/// the labels listed in --val_txt_path ("<file name> <label>" per line).
int main(int argc, char **argv) {
  gflags::ParseCommandLineFlags(&argc, &argv, true);
  if (RealPath(FLAGS_mindir_path).empty()) {
    std::cout << "Invalid mindir" << std::endl;
    return 1;
  }

  // Parse the ground-truth list: basename -> label.
  std::map<std::string, size_t> results;
  std::ifstream dir(FLAGS_val_txt_path);
  if (!dir.is_open()) {
    // Match the file's error style (message + exit code) instead of throwing
    // an exception with an empty what().
    std::cout << "ERROR: cannot open val txt file: " << FLAGS_val_txt_path << std::endl;
    return 1;
  }
  std::string line;
  while (getline(dir, line)) {
    if (line.empty()) {
      continue;
    }
    auto pos = line.find(' ');
    if (pos == std::string::npos) {
      // A line without a separator has no label; skip it rather than
      // mis-parsing (npos + 1 wraps to 0, which would reparse the whole line).
      std::cout << "WARNING: skip malformed val line: " << line << std::endl;
      continue;
    }
    results.emplace(line.substr(0, pos), std::strtol(line.substr(pos + 1).c_str(), nullptr, 10));
  }

  // Configure the target device and build the model from the MindIR file.
  GlobalContext::SetGlobalDeviceTarget("Ascend310");
  GlobalContext::SetGlobalDeviceID(FLAGS_device_id);
  auto graph = Serialization::LoadModel(FLAGS_mindir_path, ModelType::kMindIR);
  Model model((GraphCell(graph)));

  Status ret = model.Build();
  if (ret != kSuccess) {
    std::cout << "ERROR: Build failed." << std::endl;
    return 1;
  }

  // Collect the input image files.
  auto all_files = GetAllFiles(FLAGS_dataset_path);
  if (all_files.empty()) {
    std::cout << "ERROR: no input data." << std::endl;
    return 1;
  }

  // Standard ImageNet-style eval preprocessing; Normalize works on [0, 255]
  // pixel values, hence the * 255 scaling of the mean/std constants.
  std::shared_ptr<TensorTransform> decode(new Decode());
  std::shared_ptr<TensorTransform> resize(new Resize({256}));
  std::shared_ptr<TensorTransform> normalize(new Normalize({0.485 * 255, 0.456 * 255, 0.406 * 255},
                                                                           {0.229 * 255, 0.224 * 255, 0.225 * 255}));
  std::shared_ptr<TensorTransform> center_crop(new CenterCrop({224, 224}));
  std::shared_ptr<TensorTransform> hwc2chw(new HWC2CHW());

  // Compose the transforms into a single preprocessor.
  Execute compose({decode, resize, normalize, center_crop, hwc2chw});

  size_t count = 0;  // number of top-1 correct predictions
  size_t size = all_files.size();
  for (size_t i = 0; i < size; ++i) {
    std::vector<MSTensor> inputs;
    std::vector<MSTensor> outputs;
    std::cout << "Start predict input files:" << all_files[i] << std::endl;
    MSTensor img;
    ret = compose(ReadFileToTensor(all_files[i]), &img);
    if (ret != kSuccess) {
      std::cout << "Preprocess image failed." << std::endl;
      return 1;
    }
    inputs.emplace_back(img);

    ret = model.Predict(inputs, &outputs);
    if (ret != kSuccess) {
      std::cout << "Predict " << all_files[i] << " failed." << std::endl;
      return 1;
    }
    std::cout << "Predict " << all_files[i] << " success." << std::endl;
    // Compare the argmax of the first output tensor with the labeled class.
    if (GetMax(outputs[0]) == GetLabel(all_files[i], results)) {
      ++count;
    }
  }
  // all_files is non-empty here (checked above), so the division is safe.
  std::cout << "acc: " << static_cast<double>(count) / all_files.size() * 100.0 << " %" << std::endl;
  return 0;
}

