//
// Created by lqy on 2024/08/02.
//

#include "video_source.h"

#include <cstdio>
#include <cstring>
#include <ctime>

#include <filesystem>
#include <iostream>
#include <utility>
#include <vector>

namespace fs = std::filesystem;

namespace alg {

/// Builds a frame source that reads numbered JPEGs from @p directoryPath.
///
/// @param directoryPath directory containing the "%04d.jpg" frame files
/// @param big_w / big_h  target resolution for the "big" algorithm frame
/// @param sma_w / sma_h  target resolution for the "small" algorithm frame
///
/// Uses a member-initializer list (and moves the path string) instead of
/// default-construct-then-assign in the constructor body.
VideoSource::VideoSource(std::string directoryPath, int32_t big_w, int32_t big_h, int32_t sma_w, int32_t sma_h)
    : total_frame_(0),
      cur_frame_id_(0),
      cur_frame_path_(std::move(directoryPath)),
      big_frame_width_(big_w),
      big_frame_height_(big_h),
      small_frame_width_(sma_w),
      small_frame_height_(sma_h) {}

/// Overrides the resolution used for the "small" output frame.
///
/// @param width   new small-frame width in pixels
/// @param height  new small-frame height in pixels
/// @return always 0
int32_t VideoSource::SetSmallFrameRes(int32_t width, int32_t height) {
  small_frame_height_ = height;
  small_frame_width_ = width;
  return 0;
}

int32_t VideoSource::GetTotalFrameCount() {
  // 遍历目录获取 JPEG 文件
  for (const auto &entry : fs::directory_iterator(cur_frame_path_)) {
    if (entry.is_regular_file() && entry.path().extension() == ".jpg") {
      total_frame_++;
    }
  }
  LOGI("Total JPEG files: %d", total_frame_);

#if 0
  std::vector<std::string> jpgFiles;
  for (const auto &entry : fs::directory_iterator(cur_frame_path_)) {
    if (entry.is_regular_file() && entry.path().extension() == ".jpg") {
      jpgFiles.push_back(entry.path().string());
    }
  }

  for (const auto &filePath : jpgFiles) {
    cv::Mat rgbImage = cv::imread(filePath, cv::IMREAD_COLOR);
    LOGE("Read image:%s", filePath.c_str());
    if (rgbImage.empty()) {
      LOGE("Error: Unable to load image:%s", filePath.c_str());
      continue;
    }
  }
#endif

  return total_frame_;
}

/// Loads the next sequential JPEG ("%04d.jpg", 1-based), produces two NV12
/// buffers (one at the big resolution, one at the small resolution) and
/// describes them in the two output structs.
///
/// @param bigFrame    out: descriptor for the big-resolution NV12 frame
/// @param smallFrame  out: descriptor for the small-resolution NV12 frame
/// @return 0 on success, -1 if the image cannot be loaded or converted
///
/// NOTE: the returned descriptors point into function-static buffers, so the
/// data stays valid after return but only until the next call; this routine
/// is therefore not thread-safe. (Previously the buffers were locals, so the
/// pointers handed to the caller dangled immediately.)
int32_t VideoSource::GetYuvFromFile(VZ_HLP_ALG_Frame &bigFrame, VZ_HLP_ALG_Frame &smallFrame) {
  // Build the path portably with std::filesystem instead of a hard-coded
  // backslash separator (the old "%s\\%04d.jpg" only worked on Windows).
  char fileName[32];
  snprintf(fileName, sizeof(fileName), "%04d.jpg", ++cur_frame_id_);
  const std::string filePath = (fs::path(cur_frame_path_) / fileName).string();

  cv::Mat bgrImage = cv::imread(filePath, cv::IMREAD_COLOR);
  if (bgrImage.empty()) {
    LOGE("Error: Unable to load image:%s", filePath.c_str());
    return -1;
  }
  LOGI("Read file success:%s, w:%d, h:%d.", filePath.c_str(), bgrImage.cols, bgrImage.rows);

  // Resize the big frame to the algorithm's expected resolution (skip the
  // copy when the decoded image already matches).
  cv::Mat bigBgrImage;
  cv::Size bigSize(big_frame_width_, big_frame_height_);
  if (bgrImage.cols != big_frame_width_ || bgrImage.rows != big_frame_height_) {
    cv::resize(bgrImage, bigBgrImage, bigSize);
  } else {
    bigBgrImage = bgrImage;
  }

  // Static so the NV12 pixel data outlives this call — CvImg2AlgFrame stores
  // raw pointers to these buffers in the caller's structs.
  static cv::Mat bigNv12Image;
  int32_t ret = convertBGRToNV12(bigBgrImage, bigNv12Image);
  if (ret != 0) {
    LOGE("Error: convert bgr to nv12.");
    return -1;
  }

  // Small frame: always resize to the configured small resolution.
  cv::Mat resizedImg;
  cv::Size smallSize(small_frame_width_, small_frame_height_);
  cv::resize(bgrImage, resizedImg, smallSize);

  static cv::Mat smallNv12Image;
  ret = convertBGRToNV12(resizedImg, smallNv12Image);
  if (ret != 0) {
    LOGE("Error: convert bgr to nv12.");
    return -1;
  }

  // Wrap both NV12 buffers in the algorithm's frame descriptors.
  CvImg2AlgFrame(bigNv12Image, bigFrame, bigSize);
  CvImg2AlgFrame(smallNv12Image, smallFrame, smallSize);

  return 0;
}

/// Converts a BGR image to NV12 (semi-planar YUV 4:2:0): a full-resolution Y
/// plane followed by one interleaved UV plane (U0 V0 U1 V1 ...).
///
/// @param bgrImage   input, must be non-empty CV_8UC3 with even width/height
/// @param nv12Image  output, (height * 3/2) x width CV_8UC1 buffer
/// @return 0 on success, -1 on invalid input
int32_t VideoSource::convertBGRToNV12(const cv::Mat &bgrImage, cv::Mat &nv12Image) {
  // Reject anything that is not a 3-channel 8-bit image.
  if (bgrImage.empty() || bgrImage.type() != CV_8UC3) {
    LOGE("img type error:%d", bgrImage.type());
    return -1;
  }
  // 4:2:0 subsampling requires even dimensions; odd sizes would corrupt the
  // plane offset arithmetic below (previously this was silently mis-indexed).
  if ((bgrImage.cols % 2) != 0 || (bgrImage.rows % 2) != 0) {
    LOGE("img size must be even, w:%d, h:%d", bgrImage.cols, bgrImage.rows);
    return -1;
  }

  // I420 layout from OpenCV: Y plane, then U plane (w/2 x h/2), then V plane.
  cv::Mat yuvImage;
  cv::cvtColor(bgrImage, yuvImage, cv::COLOR_BGR2YUV_I420);

  const int32_t width = bgrImage.cols;
  const int32_t height = bgrImage.rows;
  const int32_t yPlaneSize = width * height;
  const int32_t uvPlaneSize = yPlaneSize / 4; // size of each chroma plane

  // NV12 buffer: Y plane plus half-height interleaved UV plane.
  nv12Image.create(height + height / 2, width, CV_8UC1);

  // Y plane copies over unchanged.
  memcpy(nv12Image.data, yuvImage.data, yPlaneSize);

  // Interleave the planar U and V samples into the NV12 UV plane.
  const uint8_t *uPlane = yuvImage.data + yPlaneSize;
  const uint8_t *vPlane = uPlane + uvPlaneSize;
  uint8_t *uvDst = nv12Image.data + yPlaneSize;
  for (int32_t j = 0; j < height / 2; j++) {
    for (int32_t i = 0; i < width / 2; i++) {
      uvDst[j * width + i * 2] = uPlane[j * (width / 2) + i];     // U
      uvDst[j * width + i * 2 + 1] = vPlane[j * (width / 2) + i]; // V
    }
  }
  return 0;
}

/// Fills an algorithm frame descriptor from an NV12 cv::Mat.
///
/// @param nv12Image  NV12 buffer (Y plane then interleaved UV plane); must
///                   stay alive as long as the descriptor is in use — only
///                   raw pointers are stored, no copy is made
/// @param algFrame   out: descriptor consumed by the algorithm
/// @param imgSize    luma width/height of the frame
/// @return always 0
int32_t VideoSource::CvImg2AlgFrame(const cv::Mat &nv12Image, VZ_HLP_ALG_Frame &algFrame, cv::Size imgSize) {
  algFrame.u32Width = imgSize.width;
  algFrame.u32Height = imgSize.height;

  // BUG FIX: the UV plane starts after the full Y plane (width * height
  // bytes), not after a single row (the old code added only u32Width).
  const alg_u64 base = (alg_u64) nv12Image.data;
  const alg_u64 uvOffset = (alg_u64) algFrame.u32Width * algFrame.u32Height;

  // NOTE(review): virtual addresses are stored in the "Phy" slots too —
  // presumably fine for this file-based test source; confirm the consumer
  // never treats them as real physical addresses.
  algFrame.u64FramePhy[0] = base;
  algFrame.u64FramePhy[1] = base + uvOffset;
  algFrame.u64FramePhy[2] = (alg_u64) 0;
  algFrame.u64VirAddr[0] = base;
  algFrame.u64VirAddr[1] = base + uvOffset;
  algFrame.u64VirAddr[2] = (alg_u64) 0;

  // NV12: both planes have a stride of one luma row in bytes.
  algFrame.u32Stride[0] = algFrame.u32Width;
  algFrame.u32Stride[1] = algFrame.u32Width;
  algFrame.u32Stride[2] = algFrame.u32Width;

  // NOTE(review): time(NULL) is whole seconds — verify the algorithm does not
  // expect millisecond/microsecond timestamps here.
  const alg_u64 now = (alg_u64) time(NULL);
  algFrame.u64TimeStamp = now;
  algFrame.u64LocalTime = now;
  algFrame.eImageType = image_yuv420sp;

  LOGI("w:%d, h:%d", algFrame.u32Width, algFrame.u32Height);

  return 0;
}

} // alg