#include <cmath>
#include "ohos_video_frame_drawer.h"
#include "rtc_base/logging.h"
#include "libyuv/planar_functions.h"
#include "common_video/util/gl_util.h"
#include "common_video/util/frame_util.h"
#include "common_video/ohos/ohos_video_buffer.h"
#include "common_video/ohos/ohos_egl_context_manager.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"

namespace webrtc {
constexpr const char* VIDEO_FRAME_DRAWER_TAG = "VideoFrameDrawer";

// Creates the drawer with a fresh YUV texture uploader for non-texture frames.
VideoFrameDrawer::VideoFrameDrawer()
    : yuv_uploader_(std::make_shared<YuvUploader>()) {}

// Releases the uploader's GL textures (if any) before dropping our reference.
VideoFrameDrawer::~VideoFrameDrawer() {
  if (yuv_uploader_ != nullptr) {
    yuv_uploader_->Release();
  }
  yuv_uploader_.reset();
}

// Debug helper: dumps one frame as raw YUV to the app's el2 files directory.
// The fixed "_0" suffix means successive calls overwrite the same file.
void VideoFrameDrawer::DumpFrame(const webrtc::VideoFrame& frame) const{
  std::string path = std::string("/data/storage/el2/base/files") + "/" + std::string("dump_frame") + "_0.yuv";
  auto file = fopen(path.c_str(), "wb");
  if (file == nullptr) {
    // fopen can fail (missing directory, permissions); passing a null FILE*
    // to PrintVideoFrameV2/fclose would be undefined behavior.
    RTC_LOG(LS_ERROR) << VIDEO_FRAME_DRAWER_TAG << " failed to open dump file: " << path;
    return;
  }
  webrtc::PrintVideoFrameV2(frame, file);
  fclose(file);
}

// Renders |frame| into the given viewport via |drawer|, applying the optional
// |additional_render_matrix| on top of the rotation/flip transform computed in
// CalculateDrawProperties. Texture-backed frames are drawn directly from their
// native buffer (under that texture's EGL mutex); CPU frames are first uploaded
// as three luminance planes by |yuv_uploader_| and drawn as a YUV texture set.
void VideoFrameDrawer::DrawFrame(const std::shared_ptr<OhosGLDrawer> drawer, const webrtc::VideoFrame& frame, const std::shared_ptr<Matrix> additional_render_matrix,
                                 int viewport_x, int viewport_y, int viewport_width, int viewport_height) {
  RTC_CHECK(drawer);
  // Width/height after applying the frame's rotation metadata.
  const int width = FrameUtil::GetFrameRotatedWidth(frame);
  const int height = FrameUtil::GetFrameRotatedHeight(frame);
  const int rotation = frame.rotation();
  // Fills render_width_/render_height_/render_matrix_ members.
  CalculateDrawProperties(width, height, rotation, frame.is_texture(), additional_render_matrix);
  GlUtil::ConvertToGLMatrix(*render_matrix_, gl_matrix);

  if (frame.is_texture()) {
    OhosVideoBuffer *ohos_buffer = static_cast<OhosVideoBuffer*>(frame.video_frame_buffer().get());
    auto frame_buffer = ohos_buffer->GetVideoFrameBuffer();
    // Serialize access to this texture across EGL consumers; the lock is held
    // for the duration of the draw call.
    std::lock_guard<std::mutex> lock(*(OhosEGLContextManager::GetInstance().GetTextureMutex(frame_buffer.textureID)));
    drawer->DrawFrame(ohos_buffer->GetVideoFrameBuffer(), gl_matrix, render_width_, render_height_, viewport_x, viewport_y, viewport_width, viewport_height);
  } else {
    // CPU frame: (re)upload Y/U/V planes into the uploader's GL textures.
    yuv_uploader_->UploadYuvData(frame);
    OhosVideoBuffer::TextureBuffer buffer = {
        .type = OhosBufferType::YUV,
        .yuvTexture = {
          static_cast<int>(yuv_uploader_->yuv_textures_[0]),
          static_cast<int>(yuv_uploader_->yuv_textures_[1]),
          static_cast<int>(yuv_uploader_->yuv_textures_[2]),
        }
    };
    drawer->DrawFrame(buffer, gl_matrix, render_width_, render_height_, viewport_x, viewport_y, viewport_width, viewport_height);
  }
}

// Computes render_width_/render_height_ (pixel size of the transformed frame)
// and render_matrix_ (texture-coordinate transform) for the next draw.
// Without an additional matrix the render size is simply the rotated frame
// size; with one, three reference points are mapped through it to measure the
// transformed rectangle's side lengths.
void VideoFrameDrawer::CalculateDrawProperties(int frame_width, int frame_height, int frame_rotation, bool is_texture, const std::shared_ptr<Matrix> additional_render_matrix) {
  if (!additional_render_matrix) {
    render_width_ = frame_width;
    render_height_ = frame_height;
  } else {
    // src_points_/dst_points_ are member arrays of (x, y) pairs; presumably
    // src_points_ holds the unit-square corners (0,0), (1,0), (0,1) — set up
    // elsewhere in the class. TODO(review): confirm against the header.
    additional_render_matrix->MapPoints(dst_points_, src_points_, 3);

    // Scale normalized coordinates up to pixel coordinates.
    for (int i = 0; i < 3; ++i) {
      dst_points_[i * 2 + 0] *= (float)frame_width;
      dst_points_[i * 2 + 1] *= (float)frame_height;
    }

    // Get the length of the sides of the transformed rectangle in terms of pixels.
    render_width_ = Distance(dst_points_[0], dst_points_[1], dst_points_[2], dst_points_[3]);
    render_height_ = Distance(dst_points_[0], dst_points_[1], dst_points_[4], dst_points_[5]);
  }

  // Build the texture transform: rotate/flip about the texture center (0.5, 0.5).
  // PreScale(1, -1) flips vertically (texture vs. image Y-axis convention).
  render_matrix_->Reset();
  render_matrix_->PreTranslate(0.5f, 0.5f);
  render_matrix_->PreScale(1.f, -1.f);
  render_matrix_->PreRotate(frame_rotation);
  render_matrix_->PreTranslate(-0.5f, -0.5f);
  if (additional_render_matrix) {
    render_matrix_->PreConcat(*additional_render_matrix);
  }
}

// Euclidean distance between (x0, y0) and (x1, y1), rounded to the nearest int.
int VideoFrameDrawer::Distance(float x0, float y0, float x1, float y1) {
  const double dx = static_cast<double>(x1) - static_cast<double>(x0);
  const double dy = static_cast<double>(y1) - static_cast<double>(y0);
  return static_cast<int>(std::round(std::hypot(dx, dy)));
}

// Allocates the three plane-texture id slots (Y, U, V), all initially 0
// (i.e. "not yet created"); the GL textures are generated lazily on first upload.
VideoFrameDrawer::YuvUploader::YuvUploader()
    : yuv_textures_(new GLuint[3]{}) {}

// Uploads the I420 planes of |frame| into three GL_LUMINANCE textures bound to
// texture units 0/1/2, creating the textures lazily on first use. Planes whose
// stride exceeds the visible width are repacked into a tight buffer first, so
// every upload hands GL tightly packed rows.
void VideoFrameDrawer::YuvUploader::UploadYuvData(const webrtc::VideoFrame& frame) const {

  auto width = frame.width();
  auto height = frame.height();
  auto i420 = frame.video_frame_buffer()->GetI420();

  // I420: full-resolution Y plane, half-resolution U and V planes.
  int plane_widths[]{width, width / 2, width / 2};
  int plane_heights[]{height, height / 2, height / 2};
  int stride_yuv[]{i420->StrideY(), i420->StrideU(), i420->StrideV()};
  const uint8_t *data_yuv[]{i420->DataY(), i420->DataU(), i420->DataV()};

  // Rows are tightly packed (either the source stride is used as the row width,
  // or the plane is repacked below). The GL default unpack alignment of 4 would
  // misread rows whose byte length is not a multiple of 4 (e.g. chroma planes
  // of odd-width frames), so request byte alignment explicitly.
  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

  for (int i = 0; i < 3; ++i) {
    int stride_width = stride_yuv[i];
    int stride_height = plane_heights[i];

    const uint8_t *buffer_data(data_yuv[i]);
    std::vector<uint8_t> packed_buffer;

    if (stride_width > plane_widths[i]) {
      // Source rows carry padding; copy into a tight buffer so the texture
      // width equals the visible plane width.
      stride_width = plane_widths[i];
      packed_buffer.resize(stride_width * stride_height);
      libyuv::CopyPlane(data_yuv[i], stride_yuv[i], packed_buffer.data(), stride_width, stride_width, stride_height);
      buffer_data = packed_buffer.data();
    }

    if (yuv_textures_[i] == 0) {
      // Lazily create the plane texture with linear filtering and edge clamping.
      glGenTextures(1, &yuv_textures_[i]);
      glBindTexture(GL_TEXTURE_2D, yuv_textures_[i]);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
    // Plane i lives on texture unit i, matching the drawer's sampler layout.
    glActiveTexture(GL_TEXTURE0 + i);
    glBindTexture(GL_TEXTURE_2D, yuv_textures_[i]);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, stride_width, stride_height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, buffer_data);
  }
}

// Deletes the three plane textures and frees the id array.
// NOTE(review): must be called at most once — yuv_textures_ is not nulled
// (this is a const method), so a second call would double-delete. The only
// caller, ~VideoFrameDrawer, honors this.
void VideoFrameDrawer::YuvUploader::Release() const {
  glDeleteTextures(3, yuv_textures_);
  delete[] yuv_textures_;
}
}