#include "njs_video_render.h"

#include <cstdint>
#include <cstring>
#include <memory>
#include <utility>

#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "libyuv.h"
#include "rawfile/raw_file.h"
#include "rawfile/raw_file_manager.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"

namespace ohosrtc {
NJSVideoRender::NJSVideoRender(std::shared_ptr<webrtc::VideoRenderInterface> video_render) { cpp_impl_ = video_render; }

// Reads one raw I420 frame (fixed QCIF 176x144) from an application rawfile
// resource and forwards it to the native render sink.
//
// @param rawManager  JS-side resource manager handle (napi_value).
// @param fileName    Name of the rawfile containing at least one full frame.
//
// Fails soft: on any error (missing resource, short file, short read) it logs
// and returns without calling OnFrame, releasing every acquired OHOS handle.
void NJSVideoRender::DrawYuvFile(napi_value rawManager, std::string fileName) {
  napi_env env = aki::JSBind::GetScopedEnv();
  NativeResourceManager* resource_mgr =
      OH_ResourceManager_InitNativeResourceManager(env, rawManager);
  if (resource_mgr == nullptr) {
    RTC_LOG(LS_ERROR) << "DrawYuvFile: failed to init native resource manager";
    return;
  }
  RawFile* raw_file = OH_ResourceManager_OpenRawFile(resource_mgr, fileName.c_str());
  if (raw_file == nullptr) {
    RTC_LOG(LS_ERROR) << "DrawYuvFile: failed to open raw file " << fileName;
    OH_ResourceManager_ReleaseNativeResourceManager(resource_mgr);
    return;
  }

  // Frame geometry is fixed to QCIF; validate the file actually holds a full
  // frame before touching the plane pointers below.
  constexpr int kWidth = 176;
  constexpr int kHeight = 144;
  constexpr size_t kYSize = static_cast<size_t>(kWidth) * kHeight;
  constexpr size_t kUvSize = kYSize / 4;  // I420: U and V planes are each 1/4 of Y.
  constexpr size_t kFrameSize = kYSize + 2 * kUvSize;

  const long len = OH_ResourceManager_GetRawFileSize(raw_file);
  bool frame_ok = false;
  std::unique_ptr<uint8_t[]> data;
  if (len < static_cast<long>(kFrameSize)) {
    RTC_LOG(LS_ERROR) << "DrawYuvFile: file too small (" << len << " < "
                      << kFrameSize << " bytes): " << fileName;
  } else {
    data = std::make_unique<uint8_t[]>(len);
    const int read = OH_ResourceManager_ReadRawFile(raw_file, data.get(), len);
    if (read < static_cast<int>(kFrameSize)) {
      RTC_LOG(LS_ERROR) << "DrawYuvFile: short read (" << read << " bytes): "
                        << fileName;
    } else {
      frame_ok = true;
    }
  }

  if (frame_ok) {
    // Planar I420 layout in the file: Y, then U, then V, contiguously.
    const uint8_t* y_plane = data.get();
    const uint8_t* u_plane = y_plane + kYSize;
    const uint8_t* v_plane = u_plane + kUvSize;

    rtc::scoped_refptr<webrtc::I420Buffer> buffer =
        webrtc::I420Buffer::Create(kWidth, kHeight);
    memcpy(buffer->MutableDataY(), y_plane, kYSize);
    memcpy(buffer->MutableDataU(), u_plane, kUvSize);
    memcpy(buffer->MutableDataV(), v_plane, kUvSize);

    webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
                                   .set_video_frame_buffer(buffer)
                                   .set_timestamp_rtp(0)
                                   .set_timestamp_us(rtc::TimeMicros())
                                   .set_rotation(webrtc::kVideoRotation_0)
                                   .build();
    cpp_impl_->OnFrame(frame);
  }

  OH_ResourceManager_CloseRawFile(raw_file);
  OH_ResourceManager_ReleaseNativeResourceManager(resource_mgr);
}

void NJSVideoRender::DrawYuvBuffer(int width, int height, aki::ArrayBuffer buffer) {
  auto buffer_data = reinterpret_cast<uint8_t *>(buffer.GetData());

  rtc::scoped_refptr<webrtc::I420Buffer> i40_buffer =
      webrtc::I420Buffer::Create(width, height);
  i40_buffer->InitializeData();

  libyuv::NV21ToI420(buffer_data, width, buffer_data + width * height, width,
                     i40_buffer->MutableDataY(), i40_buffer->StrideY(),
                     i40_buffer->MutableDataU(), i40_buffer->StrideU(),
                     i40_buffer->MutableDataV(), i40_buffer->StrideV(), width,
                     height);

  int64_t timestamp_rtp = 0;
  int64_t time_us = rtc::TimeMicros();
  webrtc::VideoFrame frame = webrtc::VideoFrame::Builder()
          .set_video_frame_buffer(i40_buffer)
          .set_timestamp_rtp(timestamp_rtp)
          .set_timestamp_us(time_us)
          .set_rotation(webrtc::kVideoRotation_90)
          .build();
	
  cpp_impl_->OnFrame(frame);
}

void NJSVideoRender::ClearColor(std::vector<float> rgba_color) {}

void NJSVideoRender::Release() {}

} // namespace ohosrtc