//
//  Agora Streaming Kit
//
//  Created by ShengQiang Liu in 2020-02.
//  Copyright (c) 2020 Agora IO. All rights reserved.
//
#include "rtmp_local_user_impl.h"

#include "agora/modules/video_coding/codecs/encoder/video_encoder_factory.h"
#include "api2/AgoraRefCountedObject.h"
#include "assert.h"
#include "engine_adapter/audio/audio_engine.h"
#include "engine_adapter/audio/audio_engine_interface.h"
#include "engine_adapter/audio/audio_node_rtmp_network_sink.h"
#include "engine_adapter/media_engine_manager.h"
#include "engine_adapter/video/video_node_rtmp_network_sink.h"
#include "facilities/tools/api_logger.h"
#include "facilities/tools/audio_utils.h"
#include "main/core/rtc_globals.h"
#include "rtc_base/task_queue.h"
#include "rtmp_connection_impl.h"
#include "utils/object/object_table.h"

namespace agora {
namespace rtc {

static const char MODULE_NAME[] = "[RtmpLocalUser]";

// Bridges PCM frames coming from an ILocalAudioTrack sink into (a) the RTMP
// audio send stream and (b) the raw-audio observers registered on the local
// user. Both pointers are non-owning; RtmpLocalUserImpl controls the lifetime.
class AudioSourceProxy : public IAudioSinkBase {
 public:
  explicit AudioSourceProxy(RtmpLocalUserImpl* local_user, AudioSendStream* audio_send_stream)
      : local_user_(local_user), audio_send_stream_(audio_send_stream) {}

  // IAudioSinkBase interface
  bool onAudioFrame(const media::base::AudioPcmFrame& audioFrame) override {
    // Only build the webrtc::AudioFrame when there is a send stream to consume
    // it; the previous combined `||` guard allocated and converted the frame
    // even when only observers were attached, then threw the work away.
    if (audio_send_stream_) {
      auto audio_frame = std::make_shared<webrtc::AudioFrame>();
      audio_frame->UpdateFrame(audioFrame.capture_timestamp,
                               audioFrame.data_,                 // data,
                               audioFrame.samples_per_channel_,  // samples_per_channel,
                               audioFrame.sample_rate_hz_,       // sample_rate_hz,
                               webrtc::AudioFrame::SpeechType::kNormalSpeech,  // speech_type,
                               webrtc::AudioFrame::VADActivity::kVadUnknown,   // vad_activity,
                               audioFrame.num_channels_ /*num_channels*/);
      audio_send_stream_->SendAudioData(audio_frame);
    }

    if (local_user_) {
      // Hand a copy to the raw-data observers registered on the local user.
      media::base::AudioPcmFrame frame;
      copy_audio_pcm_frame(audioFrame, frame, true);
      local_user_->notifyAudioFrame(&frame);
    }

    return true;
  }

 private:
  RtmpLocalUserImpl* local_user_;
  AudioSendStream* audio_send_stream_;
};

// Pass-through video pipeline node: every frame delivered to it is handed
// straight to the downstream consumers without modification.
class VideoNodeForwardFrame : public VideoNodeFrame {
 public:
  VideoNodeForwardFrame(utils::worker_type control_worker, utils::worker_type data_worker)
      : VideoNodeFrame("VideoNodeForwardFrame", control_worker, data_worker) {}

  virtual ~VideoNodeForwardFrame() { NotifyWillDestroyEvent(); }

 protected:
  // Relay each frame untouched.
  void DoOnFrame(const webrtc::VideoFrame& frame) override { ForwardFrame(frame); }
  // A pure pass-through node has nothing to set up or tear down.
  bool DoStart() override { return true; }
  bool DoStop() override { return true; }
};

// Renderer stub attached to a local video track at POSITION_PRE_ENCODER.
// It renders nothing itself: each webrtc frame is relayed to the local user's
// raw-frame observers and into the RTMP encoding pipeline via the forward
// node. Both pointers are non-owning.
class VideoSourceProxy : public IVideoRendererEx {
 public:
  explicit VideoSourceProxy(RtmpLocalUserImpl* local_user,
                            VideoNodeForwardFrame* video_frame_forward)
      : local_user_(local_user), video_frame_forward_(video_frame_forward) {}

 public:  //  inherit from IVideoRendererEx interface
  // Rendering controls are meaningless for this proxy; reject them all.
  int setRenderMode(media::base::RENDER_MODE_TYPE renderMode) override { return -1; }
  int setMirror(bool mirror) override { return -1; }
  int unsetView() override { return -1; }
  bool isExternalSink() override { return false; }
  int onFrame(const media::base::VideoFrame& video_frame) override { return -1; }
  int onFrame(const webrtc::VideoFrame& video_frame) override {
    if (local_user_ != nullptr) {
      local_user_->notifyVideoFrame(video_frame);
    }
    if (video_frame_forward_ != nullptr) {
      video_frame_forward_->OnFrame(video_frame);
    }
    return ERR_OK;
  }

 private:
  RtmpLocalUserImpl* local_user_;
  VideoNodeForwardFrame* video_frame_forward_;
};

// Constructs a local user bound to one RTMP connection.
// |config.rtmp_connnection| (sic — the spelling comes from the public Config
// struct and cannot be changed here) must be non-null (asserted below).
// Engine-side network sinks are created synchronously on the major worker
// thread before the constructor returns.
RtmpLocalUserImpl::RtmpLocalUserImpl(const Config& config)
    : rtmp_connection_(config.rtmp_connnection),
      audio_config_(config.audio_config),
      video_config_(config.video_config),
      is_audio_published_(false),
      is_video_published_(false),
      rtmp_user_observers_(utils::RtcAsyncCallback<IRtmpLocalUserObserver>::Create()),
      audio_frame_observers_(utils::RtcSyncCallback<media::base::IAudioFrameObserver>::Create()),
      video_frame_observers_(utils::RtcSyncCallback<media::base::IVideoFrameObserver>::Create()) {
  assert(rtmp_connection_);
  utils::major_worker()->sync_call(LOCATION_HERE, [this] {
    initialize();
    return 0;
  });
}

// Tears down any still-published streams and the RTMP network sinks on the
// major worker thread.
RtmpLocalUserImpl::~RtmpLocalUserImpl() {
  // Detach raw-data observers before teardown so no frame callback fires into
  // a half-destroyed object. The original code only unregistered the audio
  // observers; video observers must be detached symmetrically.
  audio_frame_observers_->Unregister();
  video_frame_observers_->Unregister();
  utils::major_worker()->sync_call(LOCATION_HERE, [this] {
    log(commons::LOG_INFO, "%s dtor", MODULE_NAME);
    if (is_audio_published_) {
      unpublishAudio(audio_track_);
    }
    audio_rtmp_network_sink_.reset();

    if (is_video_published_) {
      unpublishVideo(video_track_);
    }
    video_rtmp_network_sink_.reset();
    video_encoder_factory_.reset();
    return 0;
  });
}

// Creates the audio and video RTMP network sinks. Must run on the major
// worker thread (asserted).
void RtmpLocalUserImpl::initialize() {
  ASSERT_THREAD_IS(utils::major_worker()->getThreadId());

  // CreateRtmpNetworkSink() returns a prvalue; binding it directly avoids the
  // redundant std::move-on-a-temporary the original code carried.
  audio_rtmp_network_sink_ =
      RtcGlobals::Instance().EngineManager()->AudioEngine().CreateRtmpNetworkSink(
          rtmp_connection_);
  video_rtmp_network_sink_ =
      std::make_shared<VideoNodeRtmpNetworkSink>(utils::major_worker(), rtmp_connection_);
}

// Stores the new audio configuration; when a stream is live, the encoder is
// stopped, reconfigured, and restarted in place.
// Returns 0 on success, -ERR_FAILED if the send-stream config can't be built.
int RtmpLocalUserImpl::setAudioStreamConfiguration(const RtmpStreamingAudioConfiguration& config) {
  API_LOGGER_MEMBER("sampleRateHz:%d, bytesPerSample:%d, numberOfChannels:%d, bitrate:%d",
                    config.sampleRateHz, config.bytesPerSample, config.numberOfChannels,
                    config.bitrate);
  return utils::major_worker()->sync_call(LOCATION_HERE, [this, &config] {
    audio_config_ = config;
    if (is_audio_published_) {
      // Renamed from `config` — the original shadowed the captured parameter.
      auto stream_config = createAudioSendStreamConfig(audio_config_);
      if (!stream_config) {
        log(commons::LOG_WARN, "%s: Create audio send stream config failed.", MODULE_NAME);
        return -ERR_FAILED;
      }
      audio_send_stream_->Stop();
      audio_send_stream_->Reconfigure(*stream_config);
      audio_send_stream_->Start();
    }
    return 0;
  });
}

// Stores the new video configuration; when a stream is live, the encoder is
// reconfigured on the fly. Always returns 0.
int RtmpLocalUserImpl::setVideoStreamConfiguration(const RtmpStreamingVideoConfiguration& config) {
  API_LOGGER_MEMBER("width:%d, height:%d, fps:%d, bitrate:%d", config.width, config.height,
                    config.framerate, config.bitrate);
  return utils::major_worker()->sync_call(LOCATION_HERE, [this, &config] {
    if (is_video_published_) {
      // Push the new settings straight into the live encoder.
      auto new_config = createVideoSendStreamConfig(config);
      video_send_stream_->ReconfigureVideoEncoder(std::move(*new_config));
    }
    video_config_ = config;
    return 0;
  });
}

// Registers |observer| for raw PCM callbacks. Rejects null observers.
int RtmpLocalUserImpl::registerAudioFrameObserver(media::base::IAudioFrameObserver* observer) {
  API_LOGGER_MEMBER("observer: %p", observer);
  if (observer == nullptr) {
    return -ERR_INVALID_ARGUMENT;
  }
  audio_frame_observers_->Register(observer);
  return ERR_OK;
}

// Removes a previously registered PCM observer; null is silently ignored.
void RtmpLocalUserImpl::unregisterAudioFrameObserver(media::base::IAudioFrameObserver* observer) {
  API_LOGGER_MEMBER("observer: %p", observer);
  if (observer != nullptr) {
    audio_frame_observers_->Unregister(observer);
  }
}

// Registers |observer| for raw video-frame callbacks. Rejects null observers.
int RtmpLocalUserImpl::registerVideoFrameObserver(media::base::IVideoFrameObserver* observer) {
  API_LOGGER_MEMBER("observer: %p", observer);
  if (observer == nullptr) {
    return -ERR_INVALID_ARGUMENT;
  }
  video_frame_observers_->Register(observer);
  return ERR_OK;
}

// Removes a previously registered video observer; null is silently ignored.
void RtmpLocalUserImpl::unregisterVideoFrameObserver(media::base::IVideoFrameObserver* observer) {
  API_LOGGER_MEMBER("observer: %p", observer);
  if (observer != nullptr) {
    video_frame_observers_->Unregister(observer);
  }
}

// Builds the AAC-LC send-stream configuration feeding the RTMP audio sink.
// Returns nullptr when the default encoder factory cannot be created.
std::unique_ptr<AudioSendStream::Config> RtmpLocalUserImpl::createAudioSendStreamConfig(
    const RtmpStreamingAudioConfiguration& audioConfig) {
  auto audio_encoder_factory = WebrtcAudioEngineAdapter::CreateDefaultAudioEncoderFactory();
  if (!audio_encoder_factory) {
    log(commons::LOG_WARN, "%s: Create audio encoder factory failed", MODULE_NAME);
    return nullptr;
  }

  auto* rtmp_sink = static_cast<AudioRtmpNetworkSink*>(audio_rtmp_network_sink_.get());

  auto stream_config = std::make_unique<AudioSendStream::Config>(rtmp_sink);
  stream_config->code_type = AUDIO_CODEC_AACLC;
  webrtc::SdpAudioFormat format{"AACLC", audioConfig.sampleRateHz,
                                static_cast<size_t>(audioConfig.numberOfChannels),
                                static_cast<size_t>(audioConfig.bitrate)};
  stream_config->send_codec_spec =
      std::make_shared<AudioSendStream::Config::SendCodecSpec>(0, format);
  stream_config->encoder_factory = audio_encoder_factory;

  return stream_config;
}

// Lazily creates the shared encoder task queue on first publish.
// Must run on the major worker thread (asserted).
void RtmpLocalUserImpl::createEncoderTaskQueueIfNeeded() {
  ASSERT_THREAD_IS(utils::major_worker()->getThreadId());

  if (task_queue_) {
    return;
  }
  task_queue_ =
      std::make_unique<::rtc::TaskQueue>("media_encoder", ::rtc::TaskQueue::Priority::NORMAL);
}

// Starts publishing |audioTrack| over RTMP: builds the AAC send stream,
// attaches an AudioSourceProxy sink to the track, and notifies observers
// asynchronously. Returns 0 if already published, -ERR_FAILED on setup
// failure, ERR_OK on success.
int RtmpLocalUserImpl::publishAudio(agora_refptr<rtc::ILocalAudioTrack> audioTrack) {
  API_LOGGER_MEMBER("audio track: %p", audioTrack.get());

  if (!audioTrack) {
    return -ERR_INVALID_ARGUMENT;
  }
  return utils::major_worker()->sync_call(LOCATION_HERE, [this, &audioTrack] {
    if (is_audio_published_) {
      log(commons::LOG_WARN, "%s audio stream already in published state!", MODULE_NAME);
      return 0;
    }

    createEncoderTaskQueueIfNeeded();

    auto config = createAudioSendStreamConfig(audio_config_);
    if (!config) {
      log(commons::LOG_WARN, "%s: Create audio send stream config failed.", MODULE_NAME);
      return -ERR_FAILED;
    }

    auto audio_send_stream = AudioSendStream::Create(*config, task_queue_.get());
    if (!audio_send_stream) {
      log(commons::LOG_WARN, "%s: Create audio send stream failed.", MODULE_NAME);
      return -ERR_FAILED;
    }
    audio_send_stream->Start();

    agora_refptr<rtc::IAudioSinkBase> audio_source =
        new RefCountedObject<AudioSourceProxy>(this, audio_send_stream.get());

    rtc::AudioSinkWants sinkWants;
    sinkWants.channels = static_cast<size_t>(audio_config_.numberOfChannels);
    // Notice: From jira/MS-52546 we support 48khz samplerate on RTMP/FLV/AAC
    sinkWants.samplesPerSec = audio_config_.sampleRateHz;
    // FIXME: jira/MS-17555

    if (!audioTrack->addAudioSink(audio_source, sinkWants)) {
      log(commons::LOG_ERROR, "%s failed to publish audio stream", MODULE_NAME);
      // Roll back: don't leave the freshly created stream running, and don't
      // leave audio_track_ assigned for a publish that failed.
      audio_send_stream->Stop();
      // Capture the track by value (as publishVideo does) so the async
      // callback doesn't read mutable member state later.
      rtmp_user_observers_->Post(LOCATION_HERE, [audioTrack](auto callback) {
        callback->onAudioTrackPublicationFailure(audioTrack, PUBLISH_AUDIO_ERR_FAILED);
      });
      return -ERR_FAILED;
    }

    rtmp_user_observers_->Post(LOCATION_HERE, [audioTrack](auto callback) {
      callback->onAudioTrackPublishSuccess(audioTrack);
    });

    audio_track_ = audioTrack;
    audio_send_stream_ = std::move(audio_send_stream);
    audio_source_ = audio_source;

    is_audio_published_ = true;

    return static_cast<int>(ERR_OK);
  });
}

// Stops publishing |audioTrack|: detaches the proxy sink, stops the send
// stream, and clears the published state. Returns ERR_OK when nothing is
// published (idempotent), -ERR_FAILED if the sink can't be removed.
int RtmpLocalUserImpl::unpublishAudio(agora_refptr<rtc::ILocalAudioTrack> audioTrack) {
  API_LOGGER_MEMBER("audio track: %p", audioTrack.get());

  if (!audioTrack) {
    return -ERR_INVALID_ARGUMENT;
  }
  return utils::major_worker()->sync_call(LOCATION_HERE, [this, &audioTrack] {
    if (!is_audio_published_) {
      log(commons::LOG_WARN, "%s audio stream not in published state!", MODULE_NAME);
      return static_cast<int>(ERR_OK);
    }

    // Mirror unpublishVideo(): refuse to tear down a track we never published.
    if (audioTrack != audio_track_) {
      log(commons::LOG_WARN, "%s: try to unpublish an audio_track %p, not published %p",
          MODULE_NAME, audioTrack.get(), audio_track_.get());
      return -ERR_INVALID_ARGUMENT;
    }

    if (!audio_track_->removeAudioSink(audio_source_)) {
      log(commons::LOG_ERROR, "%s failed to un-publish audio stream", MODULE_NAME);
      return -ERR_FAILED;
    }
    audio_source_.reset();
    audio_send_stream_->Stop();
    audio_track_.reset();

    is_audio_published_ = false;
    return static_cast<int>(ERR_OK);
  });
}

// Translates the public RTMP video configuration into the internal
// VideoSendStream::Config, wiring in the (lazily created) encoder factory and
// the RTMP network sink.
std::unique_ptr<VideoSendStream::Config> RtmpLocalUserImpl::createVideoSendStreamConfig(
    const RtmpStreamingVideoConfiguration& video_config) {
  auto stream_config = std::make_unique<VideoSendStream::Config>(video_rtmp_network_sink_.get());
  if (!video_encoder_factory_) {
    video_encoder_factory_ = rtc::CreateEncoderFactory();
  }
  stream_config->encoder_settings.encoder_factory = video_encoder_factory_.get();

  auto& vc = stream_config->video_config;
  vc.width = video_config.width;
  vc.height = video_config.height;
  vc.framerate = video_config.framerate;
  vc.max_bitrate_kbps = video_config.maxBitrate;
  vc.min_bitrate_kbps = video_config.minBitrate;
  vc.target_bitrate_kbps = video_config.bitrate;
  vc.gop_ms = static_cast<int>(video_config.gopInMs);
  vc.encoder_hw_sw_mode = static_cast<int>(video_config.encoderHwSwMode);
  vc.encoder_bitrate_control_mode = static_cast<int>(video_config.encoderBitrateControlMode);
  return stream_config;
}

// NOTE(review): stub — media-player audio publishing is not implemented for
// RTMP; both parameters are ignored and success is reported unconditionally.
int RtmpLocalUserImpl::publishMediaPlayerAudio(agora_refptr<rtc::ILocalAudioTrack> audioTrack,
                                               int32_t playerId) {
  return ERR_OK;
}

// NOTE(review): stub — counterpart of publishMediaPlayerAudio(); parameters
// are ignored and success is reported unconditionally.
int RtmpLocalUserImpl::unpublishMediaPlayerAudio(agora_refptr<rtc::ILocalAudioTrack> audioTrack,
                                                 int32_t playerId) {
  return ERR_OK;
}

// Starts publishing |videoTrack| over RTMP: starts the network sink, builds
// the encoder stream and forward node, and attaches a VideoSourceProxy
// renderer to the track at the pre-encoder position. Observers are notified
// asynchronously.
int RtmpLocalUserImpl::publishVideo(agora_refptr<rtc::ILocalVideoTrack> videoTrack) {
  API_LOGGER_MEMBER("video track: %p", videoTrack.get());

  if (!videoTrack) {
    return -ERR_INVALID_ARGUMENT;
  }
  return utils::major_worker()->sync_call(LOCATION_HERE, [this, &videoTrack] {
    if (is_video_published_) {
      log(commons::LOG_WARN, "%s video stream already in published state!", MODULE_NAME);
      return -ERR_INVALID_STATE;
    }
    if (videoTrack == video_track_) {
      log(commons::LOG_WARN, "%s: try to publish a video_track %p, has published %p", MODULE_NAME,
          videoTrack.get(), video_track_.get());
      return -ERR_INVALID_ARGUMENT;
    }

    createEncoderTaskQueueIfNeeded();

    video_rtmp_network_sink_->Start();
    auto config = createVideoSendStreamConfig(video_config_);

    auto video_send_stream = VideoSendStream::Create(std::move(*config), task_queue_.get());
    auto video_frame_forward =
        std::make_shared<VideoNodeForwardFrame>(utils::major_worker(), utils::major_worker());
    video_frame_forward->Start();

    video_send_stream->SetSource(video_frame_forward.get());

    agora_refptr<IVideoRendererEx> video_source =
        new RefCountedObject<VideoSourceProxy>(this, video_frame_forward.get());

    if (!videoTrack->addRenderer(video_source, media::base::POSITION_PRE_ENCODER)) {
      log(commons::LOG_ERROR, "%s failed to publish video stream", MODULE_NAME);
      // Roll back everything started above so a failed publish leaves no
      // running sink or forward node behind (the original leaked both).
      video_send_stream->SetSource(nullptr);
      video_frame_forward->Stop();
      video_rtmp_network_sink_->Stop();
      rtmp_user_observers_->Post(LOCATION_HERE, [videoTrack](auto callback) {
        callback->onVideoTrackPublicationFailure(videoTrack, PUBLISH_VIDEO_ERR_FAILED);
      });
      return -ERR_FAILED;
    }

    rtmp_user_observers_->Post(LOCATION_HERE, [videoTrack](auto callback) {
      callback->onVideoTrackPublishSuccess(videoTrack);
    });

    video_track_ = videoTrack;
    video_send_stream->Start();

    video_send_stream_ = std::move(video_send_stream);
    video_source_ = std::move(video_source);
    video_frame_forward_ = std::move(video_frame_forward);

    is_video_published_ = true;
    return static_cast<int>(ERR_OK);
  });
}
// Detaches the renderer proxy from |videoTrack| and tears down the video
// pipeline in reverse order of publishVideo(): forward node, send stream,
// then the RTMP network sink.
int RtmpLocalUserImpl::unpublishVideo(agora_refptr<rtc::ILocalVideoTrack> videoTrack) {
  API_LOGGER_MEMBER("video track: %p", videoTrack.get());

  if (!videoTrack) {
    return -ERR_INVALID_ARGUMENT;
  }

  return utils::major_worker()->sync_call(LOCATION_HERE, [this, &videoTrack] {
    if (!is_video_published_) {
      log(commons::LOG_WARN, "%s video stream not in published state!", MODULE_NAME);
      return -ERR_INVALID_STATE;
    }

    // Only the exact track that was published may be unpublished.
    if (videoTrack != video_track_) {
      log(commons::LOG_WARN, "%s: try to unpublish a video_track %p, not published %p", MODULE_NAME,
          videoTrack.get(), video_track_.get());
      return -ERR_INVALID_ARGUMENT;
    }

    if (!video_track_->removeRenderer(video_source_, media::base::POSITION_PRE_ENCODER)) {
      // Best effort: teardown continues even if removal fails, but the proxy
      // is kept alive in that case since the track may still reference it.
      log(commons::LOG_WARN, "%s: Remove video renderer failed when unpublish video", MODULE_NAME);
    } else {
      video_source_.reset();
    }

    // Stop the upstream forward node before the encoder stream so no frame is
    // delivered into a stopping stream.
    video_frame_forward_->Stop();

    video_send_stream_->Stop();
    video_send_stream_->SetSource(nullptr);
    video_send_stream_.reset();

    video_frame_forward_.reset();
    video_track_.reset();

    // The network sink outlives publish cycles (created in initialize());
    // it is only stopped here, not destroyed.
    video_rtmp_network_sink_->Stop();

    is_video_published_ = false;

    return static_cast<int>(ERR_OK);
  });
}

// Registers |observer| for publish/unpublish event callbacks.
// |safeDeleter| (may be null) is invoked by the callback machinery to destroy
// the observer safely.
int RtmpLocalUserImpl::registerRtmpUserObserver(IRtmpLocalUserObserver* observer,
                                                void (*safeDeleter)(IRtmpLocalUserObserver*)) {
  API_LOGGER_MEMBER("observer: %p", observer);
  if (observer == nullptr) {
    return -ERR_INVALID_ARGUMENT;
  }
  rtmp_user_observers_->Register(observer, safeDeleter);
  return ERR_OK;
}

// Removes a previously registered event observer.
// NOTE: the misspelled name ("unregiste") is part of the public interface
// declared in the header and must be preserved for callers.
int RtmpLocalUserImpl::unregisteRtmpUserObserver(IRtmpLocalUserObserver* observer) {
  API_LOGGER_MEMBER("observer: %p", observer);
  if (observer == nullptr) {
    return -ERR_INVALID_ARGUMENT;
  }
  rtmp_user_observers_->Unregister(observer);
  return ERR_OK;
}

// Nudges the live video encoder bitrate up or down. Silently ignored when no
// video stream is published (e.g. audio-only streaming).
void RtmpLocalUserImpl::adjustVideoBitrate(VideoBitrateAdjustType type) {
  API_LOGGER_MEMBER("type: %d", static_cast<int>(type));

  utils::major_worker()->sync_call(LOCATION_HERE, [this, &type] {
    // Only meaningful while a video stream is actively published.
    if (is_video_published_ && video_send_stream_) {
      const auto bitrate_type = static_cast<VideoSendStream::VideoBitrateType>(type);
      video_send_stream_->adjustVideoBitrate(bitrate_type);
      return static_cast<int>(ERR_OK);
    }
    return static_cast<int>(-ERR_INVALID_STATE);
  });
}

// Adjusts the publish volume of the currently published audio track.
// Returns -ERR_INVALID_STATE when no audio stream is published.
int RtmpLocalUserImpl::adjustRecordingSignalVolume(int volume) {
  API_LOGGER_MEMBER("volume: %d", volume);

  return utils::major_worker()->sync_call(LOCATION_HERE, [this, volume] {
    if (!is_audio_published_) {
      return static_cast<int>(-ERR_INVALID_STATE);
    }
    return audio_track_->adjustPublishVolume(volume);
  });
}

// Reads back the publish volume of the currently published audio track into
// |volume|. Returns -ERR_INVALID_ARGUMENT for a null out-pointer and
// -ERR_INVALID_STATE when no audio stream is published.
int RtmpLocalUserImpl::getRecordingSignalVolume(int32_t* volume) {
  API_LOGGER_MEMBER("volume: %p", volume);
  if (volume == nullptr) {
    return -ERR_INVALID_ARGUMENT;
  }

  return utils::major_worker()->sync_call(LOCATION_HERE, [this, volume] {
    if (!is_audio_published_) {
      return static_cast<int>(-ERR_INVALID_STATE);
    }
    return audio_track_->getPublishVolume(volume);
  });
}

// Enables/disables the published audio track. Fails with -ERR_INVALID_STATE
// when no audio stream is published.
int RtmpLocalUserImpl::setAudioEnabled(bool enabled) {
  API_LOGGER_MEMBER("enabled: %d", enabled);

  return utils::major_worker()->sync_call(LOCATION_HERE, [this, enabled] {
    if (!is_audio_published_) {
      return static_cast<int>(-ERR_INVALID_STATE);
    }
    audio_track_->setEnabled(enabled);
    return static_cast<int>(ERR_OK);
  });
}

// Enables/disables the published video track. Fails with -ERR_INVALID_STATE
// when no video stream is published.
int RtmpLocalUserImpl::setVideoEnabled(bool enabled) {
  API_LOGGER_MEMBER("enabled: %d", enabled);

  return utils::major_worker()->sync_call(LOCATION_HERE, [this, enabled] {
    if (!is_video_published_) {
      return static_cast<int>(-ERR_INVALID_STATE);
    }
    video_track_->setEnabled(enabled);
    return static_cast<int>(ERR_OK);
  });
}

// Fans one PCM frame out to every registered raw-audio observer (synchronous).
void RtmpLocalUserImpl::notifyAudioFrame(media::base::AudioPcmFrame* audioFrame) {
  audio_frame_observers_->Call([audioFrame](auto observer) { observer->onFrame(audioFrame); });
}

// Fans one video frame out to the registered raw-frame observers, converting
// the webrtc frame into an agora media::base::VideoFrame (I420 on non-native
// platforms). Also keeps the stream configuration in sync with the actual
// frame size.
void RtmpLocalUserImpl::notifyVideoFrame(const webrtc::VideoFrame& video_frame) {
  if (video_config_.width != video_frame.width() || video_config_.height != video_frame.height()) {
    // Source resolution changed: reconfigure the encoder to match.
    RtmpStreamingVideoConfiguration cfg = video_config_;
    cfg.width = video_frame.width();
    cfg.height = video_frame.height();
    setVideoStreamConfiguration(cfg);
  }
  // convert to agora VideoFrameEx, backed by a webrtc VideoFrameBuffer
  agora::media::base::VideoFrame agora_video_frame;
  agora_video_frame.width = video_frame.width();
  agora_video_frame.height = video_frame.height();
  agora_video_frame.rotation = video_frame.rotation();
  agora_video_frame.renderTimeMs = video_frame.render_time_ms();
  bool use_native_buffer = false;
#if (defined(WEBRTC_ANDROID) && !defined(RTC_EXCLUDE_JAVA)) || defined(WEBRTC_IOS) || \
    defined(WEBRTC_MAC)
  use_native_buffer = true;
  agora_video_frame.type = media::base::VIDEO_PIXEL_DEFAULT;
//  agora_video_frame.videoFrameBuffer = video_frame.video_frame_buffer();
#endif
  // TODO(Haonong Yu): 2020/5/8 no need to convert to I420 once pipeline supports native format
  // |i420_buffer| is deliberately declared at function scope: when ToI420()
  // has to convert, it returns a newly allocated buffer and agora_video_frame
  // only stores raw y/u/v pointers into it. Scoping the refptr inside the
  // if-block (as before) destroyed the buffer before the observer callbacks
  // below ran, leaving those pointers dangling.
  ::rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer;
  if (!use_native_buffer) {
    agora_video_frame.type = media::base::VIDEO_PIXEL_I420;
    i420_buffer = video_frame.video_frame_buffer()->ToI420();
    agora_video_frame.yBuffer = const_cast<uint8_t*>(i420_buffer->DataY());
    agora_video_frame.yStride = i420_buffer->StrideY();
    agora_video_frame.uBuffer = const_cast<uint8_t*>(i420_buffer->DataU());
    agora_video_frame.uStride = i420_buffer->StrideU();
    agora_video_frame.vBuffer = const_cast<uint8_t*>(i420_buffer->DataV());
    agora_video_frame.vStride = i420_buffer->StrideV();
  }

  // raw data callback
  video_frame_observers_->Call(
      [&agora_video_frame](auto observer) { observer->onFrame(&agora_video_frame); });
}

}  // namespace rtc
}  // namespace agora
