// Date:   Wed Aug 13 06:01:59 PM 2025
// Mail:   lunar_ubuntu@qq.com
// Author: https://github.com/xiaoqixian

#include "detector.hpp"
#include "common.hpp"
#include "models/yolo.hpp"
#include <cassert>
#include <cstdio>

// Builds the detector: configures the OpenVINO core for throughput,
// then loads and compiles the frame-filter and YOLO models described
// by `config`.
Detector::Detector(Config config, Context context): 
  sampler_(config["sampler"]),
  config_(config),
  ctx_(context)
{
  ov_core_.set_property("CPU", ov::hint::performance_mode(
      ov::hint::PerformanceMode::CUMULATIVE_THROUGHPUT));

  // --- frame-filter (human detector) model ---
  {
    auto filter_cfg = config_["processor"]["frame_filter"]["human_detector"];
    filter_input_width_  = filter_cfg["input_width"].as<int>();
    filter_input_height_ = filter_cfg["input_height"].as<int>();
    filter_conf_thresh_  = filter_cfg["conf_threshold"].as<float>();

    auto net = ov_core_.read_model(filter_cfg["model_path"].as<std::string>());
    // NOTE(review): the static shape is given as {1, 3, W, H}; NCHW models
    // normally expect {1, 3, H, W}. Harmless if the input is square —
    // verify against the exported model otherwise.
    net->reshape({{"input", {1, 3, filter_input_width_, filter_input_height_}}});
    filter_model_ = ov_core_.compile_model(net, "CPU");
  }

  // --- YOLO detector model ---
  {
    auto yolo_cfg = config_["processor"]["yolo_detector"];
    yolo_input_width_  = yolo_cfg["input_width"].as<int>();
    yolo_input_height_ = yolo_cfg["input_height"].as<int>();
    yolo_conf_thresh_  = yolo_cfg["conf_threshold"].as<float>();
    yolo_nms_thresh_   = yolo_cfg["nms_threshold"].as<float>();

    auto net = ov_core_.read_model(yolo_cfg["model_path"].as<std::string>());
    // NOTE(review): same {1, 3, W, H} ordering caveat as the filter model.
    net->reshape({{"images", {1, 3, yolo_input_width_, yolo_input_height_}}});
    yolo_model_ = ov_core_.compile_model(net, "CPU");
  }
}

// Spawns one detection worker thread per configured sampler channel,
// then starts the sampler feeding them.
void Detector::start() {
  auto const channel_count = config_["sampler"]["channels"].size();
  workers_.reserve(channel_count);
  for (size_t ch = 0; ch < channel_count; ++ch) {
    workers_.emplace_back(worker, this, ch);
  }

  sampler_.start();
}

void Detector::wait() {
  sampler_.wait();
  stopped_.store(true, std::memory_order_release);
  sampler_.notify_all_consumers();
  printf("Sampler exit\n");
  for (auto& worker: workers_) {
    if (worker.joinable()) worker.join();
  }
  printf("Detector workers exit\n");
  sampler_.stop_decoders();
  printf("Decoders exit\n");
}

// Per-channel detection loop, executed on its own thread.
//
// Repeatedly pulls a window of packets for channel `ch_id` from the
// sampler, decodes one randomly picked keyframe, drops near-duplicate
// frames, runs the frame-filter classifier, and then runs the YOLO
// detector; frames with a sufficiently confident class-1 box are
// pushed to the shot stream.
//
// @param self  owning Detector (provides models, sampler, stop flag)
// @param ch_id sampler channel index this worker consumes
void Detector::worker(Detector* self, const size_t ch_id) {
  auto& stopped = self->stopped_;
  auto& sampler = self->sampler_;

  std::vector<FramePkts> buf;
  size_t window_size = 1;
  buf.reserve(window_size);

  static thread_local std::mt19937 gen{std::random_device{}()};

  cv::Mat prev_img;
  // Frames whose difference ratio vs. the previous kept frame is below
  // this threshold are skipped (near-static scenes).
  constexpr float FRAME_DIFF_THRESHOLD = 0.1f;

  // BUG FIX: this request was previously created from yolo_model_, so the
  // "frame filter" stage actually ran the YOLO network instead of the
  // filter classifier.
  auto filter_inf_req = self->filter_model_.create_infer_request();
  auto yolo_inf_req = self->yolo_model_.create_infer_request();

  while (!stopped.load(std::memory_order_relaxed)) {
    auto const cnt = sampler.get_pkts(ch_id, window_size, buf, stopped);
    if (cnt == 0) {
      // get_pkts only returns nothing when we were told to stop.
      assert(stopped);
      break;
    }

    // Release this window's packets however the iteration exits.
    auto guard = make_scope_guard([&buf]() {
      buf.clear();
    });

    // Pick one packet of the window uniformly at random. The distribution
    // already bounds the result, so no extra modulo is needed.
    std::uniform_int_distribution<int> pick_rand(
        0, static_cast<int>(buf.size()) - 1);
    auto const sample_pick = static_cast<size_t>(pick_rand(gen));
    Frame sample_frame;
    {
      TimeMeasureSpan span("decode");
      auto ft = sampler.decode_keyframe(buf[sample_pick]);
      auto const ft_status = ft.wait_for(std::chrono::seconds(10));
      if (ft_status != std::future_status::ready) {
        throw std::runtime_error("decode future wait long time");
      }

      sample_frame = std::move(ft.get().front());
    }

    if (sample_frame.cv_mat.empty()) continue;

    // Drop frames that barely differ from the last processed one.
    if (!prev_img.empty() && frame_diff_ratio(prev_img, sample_frame.cv_mat) < FRAME_DIFF_THRESHOLD) {
      continue;
    }
    prev_img = sample_frame.cv_mat;

    // Stage 1: binary frame filter; predicted class 1 means the frame is
    // worth running detection on.
    // NOTE(review): filter_conf_thresh_ is loaded in the constructor but
    // unused here — the decision is a plain argmax over the two outputs.
    bool is_good_frame = false;
    {
      TimeMeasureSpan span("filter");
      auto input_tensor = filter_inf_req.get_input_tensor();
      mat_preprocess(sample_frame.cv_mat, self->filter_input_width_, input_tensor);
      filter_inf_req.infer();

      auto out_tensor = filter_inf_req.get_output_tensor();
      float const* data = out_tensor.data<float>();
      const int pred_cls = std::max_element(data, data + 2) - data;
      is_good_frame = pred_cls == 1;
    }
    if (!is_good_frame) continue;

    // Stage 2: YOLO detection on the surviving frame.
    {
      auto input_tensor = yolo_inf_req.get_input_tensor();
      yolov5_preprocess(sample_frame.cv_mat, self->yolo_input_width_, self->yolo_input_height_, input_tensor);
      yolo_inf_req.infer();

      auto output_tensor = yolo_inf_req.get_output_tensor();
      auto boxes = yolov5_postprocess(
        output_tensor,
        sample_frame.cv_mat.cols,
        sample_frame.cv_mat.rows,
        self->yolo_input_width_,
        self->yolo_input_height_,
        self->yolo_conf_thresh_,
        self->yolo_nms_thresh_
      );

      // Highest confidence among class-1 boxes only.
      float highest_conf = 0;
      for (auto const& box: boxes) {
        if (box.class_id == 1 && box.score > highest_conf) {
          highest_conf = box.score;
        }
      }

      auto& shot_stream = *(self->shot_stream_);
      // NOTE(review): 0.4f is a hard-coded shot threshold, distinct from
      // yolo_conf_thresh_ — consider moving it into the config.
      if (highest_conf > 0.4f) {
        ActionShot shot = static_cast<FrameBase const&>(buf[sample_pick]);
        shot.cv_mat = sample_frame.cv_mat;
        shot.shot_ms = now_ms();
        shot_stream.try_emplace_back(std::move(shot));
      }
    }
  }

  // %zu is the correct format specifier for size_t (%zd is for ssize_t).
  printf("Detect worker %zu exit\n", ch_id);
}
