#include "ohos_video_decoder.h"

#include <chrono>
#include <cstring>

#include <multimedia/player_framework/native_avbuffer.h>
#include <multimedia/player_framework/native_avcapability.h>
#include <multimedia/player_framework/native_avcodec_videodecoder.h>
#include <multimedia/player_framework/native_avformat.h>
#include <native_window/external_window.h>

#include "api/scoped_refptr.h"
#include "libyuv.h"

namespace webrtc {

#define MAX_INPUT_FRAME_CNT 30
#define MAX_WAIT_TIME_MS 100

// Codec error callback, invoked on the codec's own thread.
// Only logs the error; recovery (fallback, keyframe request) is driven by the
// WebRTC layer reacting to Decode() return codes, not from here.
static void OnError(OH_AVCodec *codec, int32_t errorCode, void *userData)
{
  RTC_LOG_F(LS_ERROR) << "OhosVideoDecoder OnError, decoder: " << userData << ", errorCode: " << errorCode;
}

// Stream-format-change callback (codec thread). In byte-buffer mode the
// decoder's actual output dimensions are cached into oh_width_/oh_height_ so
// DeliverByteFrame can describe the NV12 source correctly. Surface mode
// ignores the event: geometry is set per-frame in DeliverTextureFrame.
static void OnStreamChanged(OH_AVCodec *codec, OH_AVFormat *format, void *userData)
{
  RTC_LOG_F(LS_INFO) << "OhosVideoDecoder OnStreamChanged, decoder: " << userData << ", OH_AVFormat: " << format;
  auto decoder = static_cast<OhosVideoDecoder*>(userData);
  if (decoder && format && !decoder->UseSurface()) {
    // BUG FIX: OH_AVFormat_GetIntValue returns false and leaves the out-param
    // untouched when the key is missing; the old code read the (uninitialized)
    // ints unconditionally and could store garbage into oh_width_/oh_height_.
    int32_t width = 0;
    int32_t height = 0;
    bool got_width = OH_AVFormat_GetIntValue(format, OH_MD_KEY_WIDTH, &width);
    bool got_height = OH_AVFormat_GetIntValue(format, OH_MD_KEY_HEIGHT, &height);
    if (got_width && got_height && width > 0 && height > 0) {
      decoder->oh_width_ = width;
      decoder->oh_height_ = height;
    } else {
      RTC_LOG_F(LS_ERROR) << "OhosVideoDecoder OnStreamChanged, invalid size, keep previous dimensions";
    }
    RTC_LOG_F(LS_INFO) << "OhosVideoDecoder OnStreamChanged, decoder:"<<decoder<<", width:"<<width<<", height:"<<height;
  }
}

// Input-buffer-available callback (codec thread): forwards the free slot to
// the decoder instance, which queues it for the next Decode() call.
static void OnNeedInputBuffer(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData)
{
  auto *self = static_cast<OhosVideoDecoder*>(userData);
  if (self == nullptr) {
    return;
  }
  self->OnCodecNeedInputData(index, buffer);
}

// Decoded-output-ready callback (codec thread): hands the finished buffer to
// the decoder instance for delivery (byte copy or surface render).
static void OnNewOutputBuffer(OH_AVCodec *codec, uint32_t index, OH_AVBuffer *buffer, void *userData)
{
  auto *self = static_cast<OhosVideoDecoder*>(userData);
  if (self == nullptr) {
    return;
  }
  self->OnCodecNewOutputData(index, buffer);
}

// All heavy setup (codec creation, EGL context) is deferred to Configure();
// the constructor only logs.
OhosVideoDecoder::OhosVideoDecoder() {
  RTC_LOG_T(LS_INFO)<<"ctor";
}

// Release() tears down the codec, queues and GL resources; calling it here
// covers owners that destroy the decoder without an explicit Release().
OhosVideoDecoder::~OhosVideoDecoder() {
  RTC_LOG_T(LS_INFO)<<"dtor";
  Release();
}

bool OhosVideoDecoder::Configure(const Settings& settings) {
  RTC_LOG_T(LS_INFO) << "Configure enter";
  if (settings.codec_type() != kVideoCodecH264 && settings.codec_type() != kVideoCodecH265) {
    RTC_LOG_T(LS_ERROR) << "Invalid codec type!";
    return false;
  }
  const char *mime = (settings.codec_type() == kVideoCodecH264) ? OH_AVCODEC_MIMETYPE_VIDEO_AVC : OH_AVCODEC_MIMETYPE_VIDEO_HEVC;
  OH_AVCapability *capability = OH_AVCodec_GetCapabilityByCategory(OH_AVCODEC_MIMETYPE_VIDEO_AVC, false, HARDWARE);
  if (!capability) {
    RTC_LOG_T(LS_ERROR) << "Hardware not support!";
    return false;
  }
  const RenderResolution& resolution = settings.max_render_resolution();
  if (!resolution.Valid()) {
    RTC_LOG_T(LS_ERROR) << "Invalid resolution!";
    return false;
  }
  width_ = resolution.Width();
  height_ = resolution.Height();
  codec_name_ = OH_AVCapability_GetName(capability);
  oh_decoder_ = OH_VideoDecoder_CreateByName(codec_name_.c_str());
  if (!oh_decoder_) {
    RTC_LOG_T(LS_ERROR) << "Create decoder failed!";
    return false;
  }
  RTC_LOG_T(LS_INFO) << "Create ohos decoder:"<<oh_decoder_;
  if (use_surface_) {
    RTC_LOG_T(LS_INFO) << "Use surface mode";
    if (!InitGLContext()) {
      RTC_LOG_T(LS_ERROR) << "Init GL context failed!";
      return false;
    }
  }
  OH_AVCodecCallback cb = {&OnError, &OnStreamChanged, &OnNeedInputBuffer, &OnNewOutputBuffer};
  int32_t ret = OH_VideoDecoder_RegisterCallback(oh_decoder_, cb, this);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Set callback failed: " << ret;
    return false;
  }
  constexpr OH_AVPixelFormat DEFAULT_PIXELFORMAT = AV_PIXEL_FORMAT_NV12;
  OH_AVFormat *format = OH_AVFormat_Create();
  OH_AVFormat_SetIntValue(format, OH_MD_KEY_WIDTH, width_);
  OH_AVFormat_SetIntValue(format, OH_MD_KEY_HEIGHT, height_);
  OH_AVFormat_SetIntValue(format, OH_MD_KEY_PIXEL_FORMAT, DEFAULT_PIXELFORMAT);
  OH_AVFormat_SetIntValue(format, OH_MD_KEY_VIDEO_ENABLE_LOW_LATENCY, 1);
  RTC_LOG_T(LS_INFO) << "Configure decoder:"<<codec_name_.c_str()<<", w:"<<width_<<", h:"<<height_;
  ret = OH_VideoDecoder_Configure(oh_decoder_, format);
  OH_AVFormat_Destroy(format);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Configure decoder failed: " << ret;
    return false;
  }
  if (use_surface_) {
    ret = OH_VideoDecoder_SetSurface(oh_decoder_, native_window_);
    if (ret != AV_ERR_OK) {
      RTC_LOG_T(LS_ERROR) << "Set surface failed: " << ret;
      return false;
    }
  }
  ret = OH_VideoDecoder_Prepare(oh_decoder_);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Prepare decoder failed: " << ret;
    return false;
  }
  ret = OH_VideoDecoder_Start(oh_decoder_);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Start decoder failed: " << ret;
    return false;
  }
  return true;
}

int32_t OhosVideoDecoder::Decode(
  const EncodedImage& inputImage, bool missingFrames,
  int64_t renderTimeMs) {
  if (!oh_decoder_) {
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
  if (wait_key_frame_ && inputImage._frameType != VideoFrameType::kVideoFrameKey) {
    return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
  }
  if (inputImage._encodedWidth > 0 && inputImage._encodedHeight > 0 && (inputImage._encodedWidth != width_ || inputImage._encodedHeight != height_)) {
    RTC_LOG_T(LS_INFO) << "Input video size changed from " << width_<<"x"<<height_<<" to "<<inputImage._encodedWidth<<"x"<<inputImage._encodedHeight;
    width_ = inputImage._encodedWidth;
    height_ = inputImage._encodedHeight;
  }
  DecoderInputData input_data(0, nullptr);
  {
    std::unique_lock<std::mutex> lock(queue_mutex_);
    if (input_data_queue_.empty()) {
      if (cv_.wait_for(lock, std::chrono::milliseconds(MAX_WAIT_TIME_MS)) == std::cv_status::timeout) {
        RTC_LOG_T(LS_ERROR)<<"No valid input buffer!";
        return WEBRTC_VIDEO_CODEC_ERROR;
      }
    }

    input_data = input_data_queue_.front();
    input_data_queue_.pop();
  }
  uint8_t* dst = reinterpret_cast<uint8_t *>(OH_AVBuffer_GetAddr(input_data.mem));
  memcpy(dst, inputImage.data(), inputImage.size());
  OH_AVCodecBufferAttr info;
  info.size = inputImage.size();
  info.offset = 0;
  info.pts = inputImage.capture_time_ms_*1000;
  int32_t ret = OH_AVBuffer_SetBufferAttr(input_data.mem, &info);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Set buffer attrib failed: " << ret;
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  }
  ret = OH_VideoDecoder_PushInputBuffer(oh_decoder_, input_data.index);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Push input buffer failed: " << ret;
    return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
  } else {
    h264_bitstream_parser_.ParseBitstream(inputImage);
    absl::optional<uint8_t> qp = h264_bitstream_parser_.GetLastSliceQp();
    std::lock_guard<std::mutex> lock(infos_mutex_);
    if ((int)input_video_infos_.size() >= MAX_INPUT_FRAME_CNT) {
      RTC_LOG_T(LS_ERROR) << "Decoder is far behind input!";
      return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
    }
    input_video_infos_.emplace(width_, height_, inputImage.capture_time_ms_, inputImage.ntp_time_ms_, inputImage.RtpTimestamp(), inputImage.rotation_, qp);
  }
  wait_key_frame_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

// Stores the sink that receives decoded frames from DeliverByteFrame() and
// FrameAvailable(). A null callback disables delivery.
int32_t OhosVideoDecoder::RegisterDecodeCompleteCallback(DecodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

// Reports the platform codec name so WebRTC stats and logs identify this as
// the hardware implementation.
VideoDecoder::DecoderInfo OhosVideoDecoder::GetDecoderInfo() const {
  VideoDecoder::DecoderInfo info;
  info.is_hardware_accelerated = true;
  info.implementation_name = codec_name_;
  return info;
}

// Codec thread: enqueue the free input slot and wake a Decode() call that may
// be blocked waiting for one. Notifying after releasing the lock avoids the
// woken thread immediately contending on queue_mutex_.
void OhosVideoDecoder::OnCodecNeedInputData(uint32_t index, OH_AVBuffer *mem) {
  {
    std::lock_guard<std::mutex> guard(queue_mutex_);
    input_data_queue_.emplace(index, mem);
  }
  cv_.notify_one();
}

// Codec thread: route decoded output by mode. Byte-buffer frames are
// converted and delivered directly; surface frames are rendered to the native
// window and delivered later via the OnFrameAvailable listener.
void OhosVideoDecoder::OnCodecNewOutputData(uint32_t index, OH_AVBuffer *mem) {
  if (!use_surface_) {
    DeliverByteFrame(index, mem);
    return;
  }
  DeliverTextureFrame(index);
}

// Byte-buffer output path (use_surface_ == false), runs on the codec's output
// thread: converts the decoder's NV12 output to an I420 buffer from pool_ and
// hands the frame to callback_. Frame metadata comes from the FIFO
// input_video_infos_ queue filled by Decode(); correctness relies on the codec
// emitting exactly one output per input, in order.
void OhosVideoDecoder::DeliverByteFrame(uint32_t index, OH_AVBuffer *mem) {
  int32_t ret;
  OH_AVCodecBufferAttr buffer_info;
  ret = OH_AVBuffer_GetBufferAttr(mem, &buffer_info);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Get buffer attrib failed: " << ret;
    // The output buffer must always be returned to the codec, even on error.
    OH_VideoDecoder_FreeOutputBuffer(oh_decoder_, index);
    return;
  }
  if (buffer_info.size <= 0) {
    RTC_LOG_T(LS_ERROR) << "Invalid decoded buffer!";
    OH_VideoDecoder_FreeOutputBuffer(oh_decoder_, index);
    return;
  }
  uint8_t * decoded_data = reinterpret_cast<uint8_t*>(OH_AVBuffer_GetAddr(mem));
  int32_t decoded_size = buffer_info.size;
  // assert(decoded_size == oh_width_*oh_height_*3/2);
  DecoderInputVideoInfo input_info(0, 0, 0, 0, 0, kVideoRotation_0, absl::nullopt);
  {
    std::lock_guard<std::mutex> lock(infos_mutex_);
    if (input_video_infos_.empty()) {
      RTC_LOG_T(LS_ERROR) << "Input info is empty!";
      OH_VideoDecoder_FreeOutputBuffer(oh_decoder_, index);
      return;
    }
    input_info = input_video_infos_.front();
    input_video_infos_.pop();
  }
  scoped_refptr<I420Buffer> frame_buffer = pool_.CreateI420Buffer(input_info.width, input_info.height);
  if (!frame_buffer) {
    RTC_LOG_T(LS_ERROR) << "Alloc output buffer memory failed!";
    OH_VideoDecoder_FreeOutputBuffer(oh_decoder_, index);
    return;
  }
  uint8_t* dst_y = frame_buffer->MutableDataY();
  uint8_t* dst_u = frame_buffer->MutableDataU();
  uint8_t* dst_v = frame_buffer->MutableDataV();
  int32_t stride_y = frame_buffer->StrideY();
  int32_t stride_u = frame_buffer->StrideU();
  int32_t stride_v = frame_buffer->StrideV();
  // libyuv::NV12ToI420(decoded_data, input_info.width, decoded_data+input_info.width*input_info.height, input_info.width, dst_y, stride_y, dst_u, stride_u, dst_v, stride_v, input_info.width, input_info.height);
  // NOTE(review): this treats the source as tightly-packed NV12 of
  // oh_width_ x oh_height_ (set by OnStreamChanged), cropped to the input
  // dimensions. If the codec pads rows (stride != oh_width_) the conversion
  // will shear -- confirm against OH_MD_KEY_VIDEO_STRIDE on real hardware.
  // Also note oh_width_/oh_height_ are 0 until the first OnStreamChanged.
  libyuv::ConvertToI420(decoded_data, decoded_size, dst_y, stride_y, dst_u, stride_u, dst_v, stride_v, 0, 0, oh_width_, oh_height_, input_info.width, input_info.height, libyuv::kRotate0, libyuv::FOURCC_NV12);
  // Third ctor argument is the render timestamp in microseconds (ms * 1000).
  // NOTE(review): input_info.rotation is recorded in Decode() but the frame is
  // delivered with kVideoRotation_0 -- confirm whether rotation is applied
  // upstream or should be propagated here.
  VideoFrame decoded_frame(frame_buffer, kVideoRotation_0, input_info.timestamp_ms*1000);
  decoded_frame.set_ntp_time_ms(input_info.ntp_timestamp_ms);
  decoded_frame.set_timestamp(input_info.rtp_timestamp);
  if (callback_) {
    callback_->Decoded(decoded_frame, absl::nullopt, input_info.qp);
  }
  ret = OH_VideoDecoder_FreeOutputBuffer(oh_decoder_, index);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Free output buffer failed: " << ret;
  }
}

void OhosVideoDecoder::DeliverTextureFrame(uint32_t index) {
  int32_t width, height;
  {
    std::lock_guard<std::mutex> lock(infos_mutex_);
    if (input_video_infos_.empty()) {
      RTC_LOG_T(LS_ERROR) << "Input info is empty!";
      OH_VideoDecoder_FreeOutputBuffer(oh_decoder_, index);
      return;
    }
    width = input_video_infos_.front().width;
    height = input_video_infos_.front().height;
  }
  int32_t ret = OH_NativeWindow_NativeWindowHandleOpt(native_window_, SET_BUFFER_GEOMETRY, width, height);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Set texture size failed, ret:"<<ret;
    OH_VideoDecoder_FreeOutputBuffer(oh_decoder_, index);
    return;
  }
  ret = OH_VideoDecoder_RenderOutputBuffer(oh_decoder_, index);
  if (ret != AV_ERR_OK) {
    RTC_LOG_T(LS_ERROR) << "Render output buffer failed, ret:"<<ret;
  }
}

// Tears down the codec, drains both bookkeeping queues and destroys the GL /
// native-window resources created by InitGLContext. Safe to call repeatedly
// (every branch null-checks) and is invoked from the destructor.
// NOTE(review): codec callbacks may still be in flight on the codec thread
// when the queues are cleared below -- confirm OH_VideoDecoder_Stop fully
// quiesces callbacks before Destroy.
int32_t OhosVideoDecoder::Release() {
  RTC_LOG_T(LS_INFO)<<"Release";
  if (oh_decoder_) {
    int32_t ret = OH_VideoDecoder_Stop(oh_decoder_);
    if (ret != AV_ERR_OK) {
        RTC_LOG_T(LS_ERROR) << "Stop decoder failed: " << ret;
    }
    ret = OH_VideoDecoder_Destroy(oh_decoder_);
    if (ret != AV_ERR_OK) {
        RTC_LOG_T(LS_ERROR) << "Destroy decoder failed: " << ret;
    }
    oh_decoder_ = nullptr;
  }

  // Drop any input-buffer indices still queued; they died with the codec.
  {
    std::unique_lock<std::mutex> lock(queue_mutex_);
    std::queue<DecoderInputData> empty;
    input_data_queue_.swap(empty);
  }

  // Drop pending per-frame metadata that will never be matched with output.
  {
    std::lock_guard<std::mutex> lock(infos_mutex_);
    std::queue<DecoderInputVideoInfo> empty;
    input_video_infos_.swap(empty);
  }

  if (native_window_) {
    OH_NativeWindow_DestroyNativeWindow(native_window_);
    native_window_ = nullptr;
  }

  if (native_image_) {
    (void)OH_NativeImage_UnsetOnFrameAvailableListener(native_image_);
    // Destroy takes the pointer's address; the explicit null below is kept as
    // belt-and-braces in case the call fails without clearing it.
    OH_NativeImage_Destroy(&native_image_);
    native_image_ = nullptr;
  }

  // Texture deletion needs a current context; take the shared per-texture
  // mutex so FrameAvailable() cannot be using the texture concurrently.
  if (texture_id_ != 0 && gl_context_ != nullptr) {
    gl_context_->MakeCurrent(egl_surface_);
    {
      std::lock_guard<std::mutex> lock(*(OhosEGLContextManager::GetInstance().GetTextureMutex(texture_id_)));
      glDeleteTextures(1, &texture_id_);
      texture_id_ = 0U;
    }
    gl_context_->MakeCurrent();
  }

  if (egl_surface_ != EGL_NO_SURFACE && gl_context_ != nullptr) {
    gl_context_->DestroyEglSurface(egl_surface_);
    egl_surface_ = EGL_NO_SURFACE;
  }

  gl_context_.reset();

  width_ = 0;
  height_ = 0;
  // Force the next Decode() after a re-Configure to start on a key frame.
  wait_key_frame_ = true;

  return WEBRTC_VIDEO_CODEC_OK;
}

// Surface-mode setup: creates a shared EGL context with a 1x1 pbuffer surface,
// an OES texture, and an OH_NativeImage bound to that texture; the codec then
// renders into the native window acquired from the image, and each rendered
// frame triggers OnNativeImageFrameAvailable. Partial failures leave resources
// allocated; Release() cleans them up. Returns true on success.
bool OhosVideoDecoder::InitGLContext() {
  auto gl_resource = OhosEGLContextManager::GetInstance().GetEGLContextResource();
  if (!gl_resource) {
    RTC_LOG_T(LS_ERROR) << "Failed to get EGL context resource!";
    return false;
  }
  // Share with the manager's context so the texture is usable by the renderer.
  gl_context_ = std::make_unique<EGLRenderContext>();
  if (!gl_context_->Init(gl_resource->eglContext_->GetEGLContext())) {
    RTC_LOG_T(LS_ERROR) << "Failed to init EGL context!";
    return false;
  }
  // Minimal 1x1 pbuffer: we only need *a* surface to make the context current;
  // actual frames live in the OES texture, not this surface.
  EGLint pBufferAttributes[] = {EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE};
  egl_surface_ = gl_context_->CreatePBufferEglSurface(pBufferAttributes);
  if (egl_surface_ == EGL_NO_SURFACE) {
    RTC_LOG_T(LS_ERROR) << "Failed to create EGL surface!";
    return false;
  }
  gl_context_->MakeCurrent(egl_surface_);
  glGenTextures(1, &texture_id_);
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id_);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_REPEAT);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_REPEAT);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  native_image_ = OH_NativeImage_Create(texture_id_, GL_TEXTURE_EXTERNAL_OES);
  if (!native_image_) {
    RTC_LOG_T(LS_ERROR) << "Failed to create native image!";
    // Detach the context on every exit path; cleanup happens in Release().
    gl_context_->MakeCurrent();
    return false;
  }
  native_image_frame_available_listener_.context = this;
  native_image_frame_available_listener_.onFrameAvailable = &OhosVideoDecoder::OnNativeImageFrameAvailable;
  int32_t ret = OH_NativeImage_SetOnFrameAvailableListener(native_image_, native_image_frame_available_listener_);
  if (ret != 0) {
    RTC_LOG_T(LS_ERROR) << "Failed to set frame available listener!";
    gl_context_->MakeCurrent();
    return false;
  }
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);
  // The codec's output surface: frames rendered here land in the OES texture.
  native_window_ = OH_NativeImage_AcquireNativeWindow(native_image_);
  if (!native_window_) {
    RTC_LOG_T(LS_ERROR) << "Failed to acquire native window!";
    gl_context_->MakeCurrent();
    return false;
  }
  gl_context_->MakeCurrent();
  RTC_LOG_T(LS_INFO) << "Init GL context success!";
  return true;
}

// Static trampoline registered with OH_NativeImage in InitGLContext; `data`
// is the OhosVideoDecoder instance stored in the listener context.
// BUG FIX: guard against a null context, matching the null checks done in
// every other codec callback (OnStreamChanged, OnNeedInputBuffer, ...).
void OhosVideoDecoder::OnNativeImageFrameAvailable(void *data) {
  OhosVideoDecoder* decoder = static_cast<OhosVideoDecoder*>(data);
  if (decoder) {
    decoder->FrameAvailable();
  }
}

// Listener thread: a rendered frame is ready in the native image. Latches it
// into the OES texture, pops the matching metadata, and delivers a texture
// frame to callback_. Returns true when a frame was delivered.
bool OhosVideoDecoder::FrameAvailable() {
  gl_context_->MakeCurrent(egl_surface_);
  {
    // Shared per-texture mutex: serializes against the renderer and against
    // glDeleteTextures in Release().
    std::lock_guard<std::mutex> lock(*(OhosEGLContextManager::GetInstance().GetTextureMutex(texture_id_)));
    int32_t ret = OH_NativeImage_UpdateSurfaceImage(native_image_);
    if (ret != 0) {
      RTC_LOG_T(LS_ERROR) << "Failed to update surface image, err:"<<ret;
      gl_context_->MakeCurrent();
      // Note: this inner `lock` shadows the texture-mutex guard above; both
      // are held here. Pop the stale metadata so the FIFO stays aligned with
      // the codec's output stream.
      std::lock_guard<std::mutex> lock(infos_mutex_);
      if (!input_video_infos_.empty()) {
        input_video_infos_.pop();
      }
      return false;
    }
  }
  DecoderInputVideoInfo input_info(0, 0, 0, 0, 0, kVideoRotation_0, absl::nullopt);
  {
    std::lock_guard<std::mutex> lock(infos_mutex_);
    if (input_video_infos_.empty()) {
      RTC_LOG_T(LS_ERROR) << "Input info is empty!";
      gl_context_->MakeCurrent();
      return false;
    }
    input_info = input_video_infos_.front();
    input_video_infos_.pop();
  }
  texture_buffer_.textureID = texture_id_;
  texture_buffer_.type = OhosBufferType::OES;
  // The transform matrix carries the producer's crop/flip for OES sampling.
  OH_NativeImage_GetTransformMatrix(native_image_, texture_buffer_.matrix);
  scoped_refptr<OhosVideoBuffer> texture_buffer = OhosVideoBuffer::Create(input_info.width, input_info.height, texture_buffer_);
  // Third ctor argument is the render timestamp in microseconds (ms * 1000).
  // NOTE(review): input_info.rotation is dropped here (kVideoRotation_0) --
  // confirm rotation is baked into the transform matrix by the producer.
  VideoFrame decoded_frame(texture_buffer, kVideoRotation_0, input_info.timestamp_ms*1000);
  decoded_frame.set_ntp_time_ms(input_info.ntp_timestamp_ms);
  decoded_frame.set_timestamp(input_info.rtp_timestamp);
  if (callback_) {
    callback_->Decoded(decoded_frame, absl::nullopt, input_info.qp);
  }
  gl_context_->MakeCurrent();
  return true;
}

}  // namespace webrtc