/**
 * ============================================================================
 *
 * Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *   1 Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *
 *   2 Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *
 *   3 Neither the names of the copyright holders nor the names of the
 *   contributors may be used to endorse or promote products derived from this
 *   software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 * ============================================================================
 */
#include <sys/time.h>
#include <vector>
#include "hiaiengine/log.h"
#include "hiaiengine/ai_types.h"
#include "hiaiengine/ai_model_parser.h"
#include "style_transfer_inference.h"
#include "util.hpp"

using hiai::Engine;
using hiai::ImageData;

namespace {
// Index of the engine output port results are sent on
// (HiAI engine ports are numbered starting at 0).
const uint32_t kSendDataPort = 0;
}

// Register the custom data types exchanged between engines so the
// HiAI framework can serialize/deserialize them across engine boundaries.
HIAI_REGISTER_DATA_TYPE("EngineTransT", EngineTransT);
HIAI_REGISTER_DATA_TYPE("OutputT", OutputT);
HIAI_REGISTER_DATA_TYPE("BatchImageParaWithScaleT", BatchImageParaWithScaleT);

// Constructor.
// The four per-style model managers are created lazily in Init(); here they
// are only reset to empty, and the counters start from zero.
StyleTransferInference::StyleTransferInference() {
  candy_model_manager_ = nullptr;
  starrynight_model_manager_ = nullptr;
  picasso_model_manager_ = nullptr;
  worksoldiers_model_manager_ = nullptr;
  total_number_ = 0;
  style_id_ = 0;
}

HIAI_StatusT StyleTransferInference::Init(
    const hiai::AIConfig& config,
    const std::vector<hiai::AIModelDescription>& model_desc) {
  HIAI_ENGINE_LOG("Start initialize!");

  // initialize aiModelManager
  if (candy_model_manager_ == nullptr) {
    candy_model_manager_ = std::make_shared<hiai::AIModelManager>();
  }
  if (starrynight_model_manager_ == nullptr) {
    starrynight_model_manager_ = std::make_shared<hiai::AIModelManager>();
  }
  if (worksoldiers_model_manager_ == nullptr) {
    worksoldiers_model_manager_ = std::make_shared<hiai::AIModelManager>();
  }
  if (picasso_model_manager_ == nullptr) {
    picasso_model_manager_ = std::make_shared<hiai::AIModelManager>();
  }

  // get parameters from graph.config
  // set model path and passcode to AI model description
  for (int index = 0; index < config.items_size(); index++) {
    const ::hiai::AIConfigItem& item = config.items(index);
    hiai::AIModelDescription fd_model_desc;
    std::vector<hiai::AIModelDescription> model_desc_vec;
    // get model path
    if (item.name() == "starrynight_model_path") {
      fd_model_desc.set_path(item.value().data());
      model_desc_vec.push_back(fd_model_desc);
      if (hiai::SUCCESS != starrynight_model_manager_->Init(config, model_desc_vec)) {
        HIAI_ENGINE_LOG(HIAI_GRAPH_INVALID_VALUE, "initialize starrynight model failed");
        return HIAI_ERROR;
      }
    } else if (item.name() == "candy_model_path") {
      fd_model_desc.set_path(item.value().data());
      model_desc_vec.push_back(fd_model_desc);
      if (hiai::SUCCESS != candy_model_manager_->Init(config, model_desc_vec)) {
        HIAI_ENGINE_LOG(HIAI_GRAPH_INVALID_VALUE, "initialize candy model failed");
        return HIAI_ERROR;
      }
    } else if (item.name() == "worksoldiers_model_path") {
      fd_model_desc.set_path(item.value().data());
      model_desc_vec.push_back(fd_model_desc);
      if (hiai::SUCCESS != worksoldiers_model_manager_->Init(config, model_desc_vec)) {
        HIAI_ENGINE_LOG(HIAI_GRAPH_INVALID_VALUE, "initialize worksoldiers model failed");
        return HIAI_ERROR;
      }
    } else if (item.name() == "picasso_model_path") {
      fd_model_desc.set_path(item.value().data());
      model_desc_vec.push_back(fd_model_desc);
      if (hiai::SUCCESS != picasso_model_manager_->Init(config, model_desc_vec)) {
        HIAI_ENGINE_LOG(HIAI_GRAPH_INVALID_VALUE, "initialize picasso model failed");
        return HIAI_ERROR;
      }
    }
  }

  HIAI_ENGINE_LOG("End initialize!");
  return HIAI_OK;
}


// Engine process entry point.
// arg0: BatchImageParaWithScaleT with the preprocessed (resized) input image.
// arg1: optional StyleType registration; when null the previously registered
//       style id is reused.
// Runs the style model selected by style_id_ on the input image and sends an
// EngineTransT (inference outputs, or status=false on failure) to port 0.
HIAI_IMPL_ENGINE_PROCESS("style_transfer_inference",
    StyleTransferInference, INPUT_SIZE) {

  // Deal arg0 and arg1 (register type input). A null arg1 means "keep the
  // previously registered style type".
  if (arg1 != nullptr) {
    std::shared_ptr<StyleType> register_result =
        std::static_pointer_cast<StyleType>(arg1);
    style_id_ = register_result->type;
    HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT, "register value:%d", style_id_);
  }
  if (arg0 == nullptr) {
    HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT, "arg0 input is nullptr.");
    return HIAI_ERROR;
  }

  std::shared_ptr<BatchImageParaWithScaleT> image_handle =
      std::static_pointer_cast<BatchImageParaWithScaleT>(arg0);

  // guard against an empty batch before indexing v_img[0]
  if (image_handle->v_img.empty()) {
    HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT, "input image vector is empty.");
    return HIAI_ERROR;
  }

  ImageData<u_int8_t>& resized_img = image_handle->v_img[0].img;
  uint32_t all_input_size = resized_img.size * sizeof(uint8_t);
  // empty input: nothing to inference (size is unsigned, so only 0 is invalid)
  if (all_input_size == 0) {
    HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT,
                    "all input image size=%u is zero, skip inference",
                    all_input_size);
    return HIAI_ERROR;
  }

  // wrap the input image in a neural-network buffer (no copy: the buffer
  // points directly at the image data, which image_handle keeps alive)
  std::shared_ptr<EngineTransT> trans_data = std::make_shared<EngineTransT>();
  trans_data->b_info = image_handle->b_info;
  std::shared_ptr<hiai::AINeuralNetworkBuffer> neural_buf = std::shared_ptr<
      hiai::AINeuralNetworkBuffer>(
      new hiai::AINeuralNetworkBuffer(),
      std::default_delete<hiai::AINeuralNetworkBuffer>());
  neural_buf->SetBuffer((void*) resized_img.data.get(), all_input_size);

  // create input data
  std::shared_ptr<hiai::IAITensor> input_data = std::static_pointer_cast<
      hiai::IAITensor>(neural_buf);
  std::vector<std::shared_ptr<hiai::IAITensor>> input_data_vec;
  std::vector<std::shared_ptr<hiai::IAITensor>> output_data_vector;
  input_data_vec.push_back(input_data);

  // select the model manager once; both CreateOutputTensor and Process use
  // the same mapping (0:picasso, 1:candy, 2:starry, 3:worker)
  std::shared_ptr<hiai::AIModelManager> model_manager = nullptr;
  switch (style_id_) {
    case 0:
      model_manager = picasso_model_manager_;
      break;
    case 1:
      model_manager = candy_model_manager_;
      break;
    case 2:
      model_manager = starrynight_model_manager_;
      break;
    case 3:
      model_manager = worksoldiers_model_manager_;
      break;
    default:
      break;
  }

  // 1. create output tensor
  hiai::AIStatus ret = hiai::FAILED;
  if (model_manager == nullptr) {
    HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT, "failed to create output tensor, this type is not support!");
  } else {
    ret = model_manager->CreateOutputTensor(input_data_vec, output_data_vector);
  }
  // create failed, also need to send data to next engine port 0.
  if (ret != hiai::SUCCESS) {
    HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT, "failed to create output tensor, \
          model type:%d(0:picasso,1:candy,2:starry,3:worker)", style_id_);
    trans_data->status = false;
    trans_data->msg = "HiAIInference Engine CreateOutputTensor failed";
    SendData(kSendDataPort, "EngineTransT", std::static_pointer_cast<void>(trans_data));
    return HIAI_ERROR;
  }

  // 2. call process (run inference)
  unsigned long model_start_time = get_current_time();
  hiai::AIContext ai_context;
  ret = model_manager->Process(ai_context, input_data_vec, output_data_vector,
                               AI_MODEL_PROCESS_TIMEOUT);
  // process failed, also need to send data to next engine port 0.
  if (ret != hiai::SUCCESS) {
    HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT, "failed to process ai_model, \
          model type:%d(0:picasso,1:candy,2:starry,3:worker)", style_id_);
    trans_data->status = false;
    trans_data->msg = "HiAIInference Engine Process failed";
    SendData(kSendDataPort, "EngineTransT", std::static_pointer_cast<void>(trans_data));
    return HIAI_ERROR;
  }
  unsigned long model_infer_time = get_current_time() - model_start_time;

  // generate output data: deep-copy every output tensor into trans_data
  unsigned long cp_start_time = get_current_time();
  trans_data->status = true;
  for (uint32_t i = 0; i < output_data_vector.size(); i++) {
    std::shared_ptr<hiai::AISimpleTensor> result_tensor =
        std::static_pointer_cast<hiai::AISimpleTensor>(output_data_vector[i]);
    OutputT out;
    out.size = result_tensor->GetSize();
    out.data = std::shared_ptr<uint8_t>(new uint8_t[out.size],
                                        std::default_delete<uint8_t[]>());
    errno_t mem_ret = memcpy_s(out.data.get(), out.size,
                               result_tensor->GetBuffer(),
                               result_tensor->GetSize());
    // memory copy failed, skip this result
    if (mem_ret != EOK) {
      HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT,
                      "dealing results: memcpy_s() error=%d", mem_ret);
      continue;
    }
    trans_data->output_datas.push_back(out);
    unsigned long output_cp_time = get_current_time() - cp_start_time;
    // %lu matches the unsigned long timing values (was %d: undefined behavior)
    HIAI_ENGINE_LOG(HIAI_ENGINE_RUN_ARGS_NOT_RIGHT, "inference time:%lu(us) copy time:%lu (us)", model_infer_time, output_cp_time);
  }

  // send results and original image data to post process (port 0)
  HIAI_StatusT hiai_ret = SendData(kSendDataPort, "EngineTransT", std::static_pointer_cast<void>(trans_data));
  HIAI_ENGINE_LOG("End process!");
  return hiai_ret;
}
