

#include "Player.h"
#include "av_codec_sample_log.h"
#include "dfx/error/av_codec_sample_error.h"
#include <cstdint>
#include "config/config.h"

#undef LOG_TAG
#define LOG_TAG "player"

namespace {
constexpr int BALANCE_VALUE = 5;
using namespace std::chrono_literals;
static const int MS_TO_S = 1000;
constexpr int64_t WAIT_TIME_US_THRESHOLD_WARNING = -1 * 40 * 1000;  // warning threshold 40ms
constexpr int64_t WAIT_TIME_US_THRESHOLD = 1 * 1000 * 1000;         // max sleep time 1s
constexpr int64_t SINK_TIME_US_THRESHOLD = 100000;                  // max sink time 100ms
constexpr int32_t BYTES_PER_SAMPLE_2 = 2;                           // 2 bytes per sample
// BUG FIX: 1000 / 60 is integer division and truncates to 16; use
// floating-point division so the 60Hz frame time is the intended ~16.67ms.
constexpr double VSYNC_TIME = 1000.0 / 60;                          // frame time (ms)
constexpr double LIP_SYNC_BALANCE_VALUE = 2;                        // the balance value of sync sound and picture
}  // namespace

Player::~Player()
{
    // Tear down worker threads and codec resources. StartRelease() is guarded
    // by isReleased_, so this is safe even if release already happened.
    Player::StartRelease();
}

int32_t Player::Init(SampleInfo &sampleInfo)
{
    // Creates the demuxer, video/audio decoders and the audio renderer for the
    // media described by sampleInfo. Must be called before Start().
    // Returns AVCODEC_SAMPLE_ERR_OK on success, an error code otherwise.
    std::lock_guard<std::mutex> lock(mutex_);
    CHECK_AND_RETURN_RET_LOG(!isStarted_, AVCODEC_SAMPLE_ERR_ERROR, "Already started.");
    CHECK_AND_RETURN_RET_LOG(demuxer_ == nullptr && videoDecoder_ == nullptr, AVCODEC_SAMPLE_ERR_ERROR,
                             "Already started.");

    sampleInfo_ = sampleInfo;

    videoDecoder_ = std::make_unique<VideoDecoder>();
    audioDecoder_ = std::make_unique<AudioDecoder>();
    demuxer_ = std::make_unique<Demuxer>();
    audioRenderer_ = std::make_unique<AudioRenderer>();

    int32_t ret = demuxer_->Create(sampleInfo_);
    CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Create demuxer failed");

    // Audio is optional; the app config decides whether it is enabled.
    if (AppConfig::GetInstance().GetAudioValue()) {
        ret = CreateAudioDecoder();
        CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Create audio decoder failed");
        // Initialize audio playback.
        // NOTE(review): CreateAudioDecoder() returns OK even when decoder
        // creation fails, leaving audioDecContext_ null here — confirm
        // AudioRendererInit tolerates a null context.
        audioRenderer_->AudioRendererInit(sampleInfo_, audioDecContext_);
    }

    ret = CreateVideoDecoder();
    CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Create video decoder failed");

    isReleased_ = false;
    AVCODEC_SAMPLE_LOGI("Succeed");
    return AVCODEC_SAMPLE_ERR_OK;
}

int32_t Player::Start()
{
    // Starts the decoders and spawns the input/output worker threads for each
    // enabled stream, then kicks off audio rendering.
    // Returns AVCODEC_SAMPLE_ERR_OK on success, an error code otherwise.
    std::lock_guard<std::mutex> lock(mutex_);
    CHECK_AND_RETURN_RET_LOG(!isStarted_, AVCODEC_SAMPLE_ERR_ERROR, "Already started.");
    CHECK_AND_RETURN_RET_LOG(demuxer_ != nullptr && videoDecoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR,
                             "Already started.");
    int32_t ret;
    if (videoDecContext_) {
        ret = videoDecoder_->Start();
        CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Decoder start failed");
        isStarted_ = true;
        // std::make_unique either succeeds or throws, so the former null checks
        // here were dead code. (They also called StartRelease() while holding
        // mutex_, which would have deadlocked in Release().)
        videoDecInputThread_ = std::make_unique<std::thread>(&Player::VideoDecInputThread, this);
        videoDecOutputThread_ = std::make_unique<std::thread>(&Player::VideoDecOutputThread, this);
    }

    if (audioDecContext_) {
        ret = audioDecoder_->Start();
        CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Audio Decoder start failed");
        isStarted_ = true;
        audioDecInputThread_ = std::make_unique<std::thread>(&Player::AudioDecInputThread, this);
        audioDecOutputThread_ = std::make_unique<std::thread>(&Player::AudioDecOutputThread, this);

        // Drop any stale PCM left in the render cache from a previous run.
        // (audioDecContext_ is known non-null inside this branch.)
        audioDecContext_->ClearCache();

        // Start audio playback.
        audioRenderer_->AudioRendererStart();
    }

    AVCODEC_SAMPLE_LOGI("Succeed");
    doneCond_.notify_all();
    return AVCODEC_SAMPLE_ERR_OK;
}

void Player::StartRelease()
{
    // One-shot entry to Release(): only the first caller performs teardown.
    // NOTE(review): the check-then-set on isReleased_ is not atomic here; if
    // StartRelease() can be reached concurrently (e.g. destructor racing a
    // playback-done path), Release() could run twice — confirm isReleased_ is
    // std::atomic<bool> or that all callers are serialized.
    if (!isReleased_) {
        isReleased_ = true;
        // Release threads and other resources.
        Release();
    }
}

void Player::Release()
{
    // Stops the worker threads, releases codec/demuxer/renderer resources and
    // fires the play-done callback. Order matters: threads are joined before
    // the contexts they dereference are deleted.
    std::lock_guard<std::mutex> lock(mutex_);
    isStarted_ = false;

    if (audioRenderer_ != nullptr) {
        audioRenderer_->AudioRendererRelease();
        audioRenderer_.reset();
    }
    // Wake each worker (their wait predicates observe isStarted_ == false) and
    // join it. The context pointers are null-guarded before notify: a thread
    // handle should only exist if its context was created, but the guard keeps
    // this safe if that invariant ever changes.
    if (videoDecInputThread_ && videoDecInputThread_->joinable()) {
        if (videoDecContext_ != nullptr) {
            videoDecContext_->inputCond_.notify_all();
        }
        videoDecInputThread_->join();
        videoDecInputThread_.reset();
    }
    if (videoDecOutputThread_ && videoDecOutputThread_->joinable()) {
        if (videoDecContext_ != nullptr) {
            videoDecContext_->outputCond_.notify_all();
        }
        videoDecOutputThread_->join();
        videoDecOutputThread_.reset();
    }
    if (audioDecInputThread_ && audioDecInputThread_->joinable()) {
        if (audioDecContext_ != nullptr) {
            audioDecContext_->inputCond_.notify_all();
        }
        audioDecInputThread_->join();
        audioDecInputThread_.reset();
    }
    if (audioDecOutputThread_ && audioDecOutputThread_->joinable()) {
        if (audioDecContext_ != nullptr) {
            audioDecContext_->outputCond_.notify_all();
        }
        audioDecOutputThread_->join();
        audioDecOutputThread_.reset();
    }
    if (demuxer_ != nullptr) {
        demuxer_->Release();
        demuxer_.reset();
    }
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Release();
        videoDecoder_.reset();
    }
    if (videoDecContext_ != nullptr) {
        delete videoDecContext_;
        videoDecContext_ = nullptr;
    }
    if (audioDecoder_ != nullptr) {
        audioDecoder_->Release();
        audioDecoder_.reset();
    }
    if (audioDecContext_ != nullptr) {
        delete audioDecContext_;
        audioDecContext_ = nullptr;
    }

    doneCond_.notify_all();
    // Notify the owner that playback has finished.
    if (sampleInfo_.PlayDoneCallback != nullptr) {
        sampleInfo_.PlayDoneCallback(sampleInfo_.playDoneCallbackData);
    }

    AVCODEC_SAMPLE_LOGI("Succeed");
}

void Player::VideoDecInputThread()
{
    // Worker loop: feeds demuxed video samples into the video decoder.
    // Exits when the player stops or pushing an input buffer fails.
    while (true) {
        CHECK_AND_BREAK_LOG(isStarted_, "Decoder input thread out");
        std::unique_lock<std::mutex> lock(videoDecContext_->inputMutex_);
        // Wait (up to 5s) for a free input buffer queued by the decoder callback.
        bool condRet = videoDecContext_->inputCond_.wait_for(
            lock, 5s, [this]() { return !isStarted_ || !videoDecContext_->inputBufferInfoQueue_.empty(); });
        CHECK_AND_BREAK_LOG(isStarted_, "Work done, thread out");
        CHECK_AND_CONTINUE_LOG(!videoDecContext_->inputBufferInfoQueue_.empty(),
                               "Buffer queue is empty, continue, cond ret: %{public}d", condRet);

        CodecBufferInfo bufferInfo = videoDecContext_->inputBufferInfoQueue_.front();
        videoDecContext_->inputBufferInfoQueue_.pop();
        videoDecContext_->inputFrameCount_++;
        lock.unlock();  // drop the queue lock before the demuxer read

        // Fill the buffer with the next video sample from the container.
        demuxer_->ReadSample(demuxer_->GetVideoTrackId(), reinterpret_cast<OH_AVBuffer *>(bufferInfo.buffer),
                             bufferInfo.attr);

        int32_t ret = videoDecoder_->PushInputBuffer(bufferInfo);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Push data failed, thread out");
        // EOS is handled by the output thread; this loop runs until isStarted_
        // drops or a push fails.
    }
}

void Player::VideoDecOutputThread()
{
    sampleInfo_.frameInterval = MICROSECOND_TO_S / sampleInfo_.frameRate;
    int64_t PER_SINK_TIME_THRESHOLD = MS_TO_S / sampleInfo_.frameRate * MS_TO_S;  // max per sink time
    while (true) {
        thread_local auto lastPushTime = std::chrono::system_clock::now();
        CHECK_AND_BREAK_LOG(isStarted_, "Decoder output thread out");
        std::unique_lock<std::mutex> lock(videoDecContext_->outputMutex_);
        bool condRet = videoDecContext_->outputCond_.wait_for(
            lock, 5s, [this]() { return !isStarted_ || !videoDecContext_->outputBufferInfoQueue_.empty(); });
        CHECK_AND_BREAK_LOG(isStarted_, "Decoder output thread out");
        CHECK_AND_CONTINUE_LOG(!videoDecContext_->outputBufferInfoQueue_.empty(),
                               "Buffer queue is empty, continue, cond ret: %{public}d", condRet);

        CodecBufferInfo bufferInfo = videoDecContext_->outputBufferInfoQueue_.front();
        videoDecContext_->outputBufferInfoQueue_.pop();
        CHECK_AND_BREAK_LOG(!(bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_EOS), "Catch EOS, thread out");
        videoDecContext_->outputFrameCount_++;
        AVCODEC_SAMPLE_LOGW("Out buffer count: %{public}u, size: %{public}d, flag: %{public}u, pts: %{public}" PRId64,
                            videoDecContext_->outputFrameCount_, bufferInfo.attr.size, bufferInfo.attr.flags,
                            bufferInfo.attr.pts);
        lock.unlock();

        if (!audioDecContext_) {
            int32_t ret = videoDecoder_->FreeOutputBuffer(bufferInfo.bufferIndex, true);
            CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Decoder output thread out");

            std::this_thread::sleep_until(lastPushTime + std::chrono::microseconds(sampleInfo_.frameInterval));
            lastPushTime = std::chrono::system_clock::now();
        } else {
            // get audio render position
            int64_t framePosition = 0;
            int64_t timestamp = 0;
            int32_t ret = audioRenderer_->GetTimestamp(CLOCK_MONOTONIC, &framePosition, &timestamp);
            AVCODEC_SAMPLE_LOGI("VD framePosition: %{public}li, nowTimeStamp: %{public}li", framePosition,
                                nowTimeStamp);
            audioTimeStamp = timestamp;  // ns
            // audio render getTimeStamp error, render it
            if (ret != AUDIOSTREAM_SUCCESS || (timestamp == 0) || (framePosition == 0)) {
                // first frame, render without wait
                ret = videoDecoder_->FreeOutputBuffer(bufferInfo.bufferIndex, true, GetCurrentTime());
                CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Decoder output thread out");
                std::this_thread::sleep_until(lastPushTime + std::chrono::microseconds(sampleInfo_.frameInterval));
                lastPushTime = std::chrono::system_clock::now();
                continue;
            }
            // after seek, audio render flush, framePosition = 0, then writtenSampleCnt = 0
            int64_t latency = (writtenSampleCnt - framePosition) * 1000 * 1000 / sampleInfo_.audioSampleRate;
            AVCODEC_SAMPLE_LOGI("VD latency: %{public}li writtenSampleCnt: %{public}li", latency, writtenSampleCnt);

            nowTimeStamp = GetCurrentTime();
            int64_t anchordiff = (nowTimeStamp - audioTimeStamp) / 1000;

            int64_t audioPlayedTime = audioBufferPts - latency + anchordiff;  // us, audio buffer accelerate render time
            int64_t videoPlayedTime = bufferInfo.attr.pts;                    // us, video buffer expected render time

            // audio render timestamp and now timestamp diff
            int64_t waitTimeUs = videoPlayedTime - audioPlayedTime;  // us

            AVCODEC_SAMPLE_LOGI("VD bufferInfo.bufferIndex: %{public}u", bufferInfo.bufferIndex);
            AVCODEC_SAMPLE_LOGI(
                "VD audioPlayedTime: %{public}li, videoPlayedTime: %{public}li, nowTimeStamp_:{public}ld, "
                "audioTimeStamp_ :{public}ld, waitTimeUs :{public}ld, anchordiff :%{public}ld",
                audioPlayedTime, videoPlayedTime, nowTimeStamp, audioTimeStamp, waitTimeUs, anchordiff);

            bool dropFrame = false;

            // video buffer is too late, drop it
            if (waitTimeUs < WAIT_TIME_US_THRESHOLD_WARNING) {
                dropFrame = true;
                AVCODEC_SAMPLE_LOGI("VD buffer is too late");
            } else {
                AVCODEC_SAMPLE_LOGE("VD buffer is too early waitTimeUs:%{public}ld", waitTimeUs);
                // [0, ), render it wait waitTimeUs, max 1s
                // [-40, 0), render it
                if (waitTimeUs > WAIT_TIME_US_THRESHOLD) {
                    waitTimeUs = WAIT_TIME_US_THRESHOLD;
                }
                // per frame render time reduced by 33ms
                if (waitTimeUs > sampleInfo_.frameInterval + PER_SINK_TIME_THRESHOLD) {
                    waitTimeUs = sampleInfo_.frameInterval + PER_SINK_TIME_THRESHOLD;
                    AVCODEC_SAMPLE_LOGE("VD buffer is too early and reduced 33ms, waitTimeUs: %{public}ld", waitTimeUs);
                }
            }

            if (static_cast<double>(waitTimeUs) > VSYNC_TIME * LIP_SYNC_BALANCE_VALUE) {
                std::this_thread::sleep_for(std::chrono::microseconds(
                    static_cast<int64_t>(static_cast<double>(waitTimeUs) - VSYNC_TIME * LIP_SYNC_BALANCE_VALUE)));
            }

            ret = videoDecoder_->FreeOutputBuffer(bufferInfo.bufferIndex, !dropFrame,
                                                  VSYNC_TIME * LIP_SYNC_BALANCE_VALUE * 1000 + GetCurrentTime());
            CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Decoder output thread");
        }
    }

    writtenSampleCnt = 0;
    audioBufferPts = 0;
}

void Player::AudioDecInputThread()
{
    // Worker loop: feeds demuxed audio samples into the audio decoder.
    // Exits when the player stops, a push fails, or EOS has been queued.
    while (true) {
        CHECK_AND_BREAK_LOG(isStarted_, "Decoder input thread out");
        std::unique_lock<std::mutex> lock(audioDecContext_->inputMutex_);
        // Wait (up to 5s) for a free input buffer queued by the decoder callback.
        bool condRet = audioDecContext_->inputCond_.wait_for(
            lock, 5s, [this]() { return !isStarted_ || !audioDecContext_->inputBufferInfoQueue_.empty(); });
        CHECK_AND_BREAK_LOG(isStarted_, "Work done, thread out");
        CHECK_AND_CONTINUE_LOG(!audioDecContext_->inputBufferInfoQueue_.empty(),
                               "Buffer queue is empty, continue, cond ret: %{public}d", condRet);

        CodecBufferInfo bufferInfo = audioDecContext_->inputBufferInfoQueue_.front();
        audioDecContext_->inputBufferInfoQueue_.pop();
        audioDecContext_->inputFrameCount_++;
        lock.unlock();  // drop the queue lock before the demuxer read

        // Fill the buffer with the next audio sample from the container.
        demuxer_->ReadSample(demuxer_->GetAudioTrackId(), reinterpret_cast<OH_AVBuffer *>(bufferInfo.buffer),
                             bufferInfo.attr);

        int32_t ret = audioDecoder_->PushInputBuffer(bufferInfo);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Push data failed, thread out");

        // Unlike the video input thread, this loop ends once EOS is pushed.
        CHECK_AND_BREAK_LOG(!(bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_EOS), "Catch EOS, thread out");
    }
}

void Player::AudioDecOutputThread()
{
    // Worker loop: drains decoded PCM into the render cache and maintains the
    // A/V-sync anchors (writtenSampleCnt / audioBufferPts) read by the video
    // output thread. Exits on stop or EOS.
    while (true) {
        CHECK_AND_BREAK_LOG(isStarted_, "Decoder output thread out");
        std::unique_lock<std::mutex> lock(audioDecContext_->outputMutex_);
        bool condRet = audioDecContext_->outputCond_.wait_for(
            lock, 5s, [this]() { return !isStarted_ || !audioDecContext_->outputBufferInfoQueue_.empty(); });
        CHECK_AND_BREAK_LOG(isStarted_, "Decoder output thread out");
        CHECK_AND_CONTINUE_LOG(!audioDecContext_->outputBufferInfoQueue_.empty(),
                               "Buffer queue is empty, continue, cond ret: %{public}d", condRet);

        CodecBufferInfo bufferInfo = audioDecContext_->outputBufferInfoQueue_.front();
        audioDecContext_->outputBufferInfoQueue_.pop();

        CHECK_AND_BREAK_LOG(!(bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_EOS), "Catch EOS, thread out");
        audioDecContext_->outputFrameCount_++;
        AVCODEC_SAMPLE_LOGW("Out buffer count: %{public}u, size: %{public}d, flag: %{public}u, pts: %{public}" PRId64,
                            audioDecContext_->outputFrameCount_, bufferInfo.attr.size, bufferInfo.attr.flags,
                            bufferInfo.attr.pts);
        uint8_t *source = OH_AVBuffer_GetAddr(reinterpret_cast<OH_AVBuffer *>(bufferInfo.buffer));
        // Copy the decoded PCM data into the render cache queue.
        audioDecContext_->WriteCache(source, bufferInfo.attr.size);
        lock.unlock();

        int32_t ret = audioDecoder_->FreeOutputBuffer(bufferInfo.bufferIndex, true);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Decoder output thread out");

        // Total samples written so far: byte size / channel count / 2 bytes per
        // sample. Used by the video thread to estimate audio render latency.
        writtenSampleCnt += (bufferInfo.attr.size / sampleInfo_.audioChannelCount / BYTES_PER_SAMPLE_2);
        // NOTE(review): the %ld/%i specifiers below lack the {public} tag used
        // elsewhere in this file, so these values may be redacted in the log
        // output — confirm against the HiLog privacy convention.
        AVCODEC_SAMPLE_LOGI("writtenSampleCnt_: %ld, bufferInfo.attr.size: %i, sampleInfo_.audioChannelCount: %i",
                            writtenSampleCnt, bufferInfo.attr.size, sampleInfo_.audioChannelCount);
        audioBufferPts = bufferInfo.attr.pts;

        // Back-pressure: wait (max 20ms) until the unplayed cache drops below
        // BALANCE_VALUE buffers' worth, so the cache does not grow unbounded.
        std::unique_lock<std::mutex> lockRender(audioDecContext_->renderMutex);
        audioDecContext_->renderCond.wait_for(lockRender, 20ms, [this, bufferInfo]() {
            return audioDecContext_->remainlen_ < BALANCE_VALUE * bufferInfo.attr.size;
        });
    }
    AVCODEC_SAMPLE_LOGI("Out buffer end");
}

int64_t Player::GetCurrentTime()
{
    // Returns the current CLOCK_MONOTONIC time in nanoseconds, or -1 when the
    // clock query fails.
    struct timespec now;
    const int ret = clock_gettime(CLOCK_MONOTONIC, &now);
    CHECK_AND_RETURN_RET_LOG(ret >= 0, -1, "GetCurNanoTime fail, result:%{public}i", ret);
    return static_cast<int64_t>(now.tv_sec) * NANO_TO_S + now.tv_nsec;
}

int32_t Player::CreateVideoDecoder()
{
    // Instantiate the platform video decoder for the stream's mime type.
    int32_t result = videoDecoder_->Create(sampleInfo_.videoCodecMime);
    CHECK_AND_RETURN_RET_LOG(result == AVCODEC_SAMPLE_ERR_OK, result, "Create video decoder failed");

    // Allocate the shared user data (buffer queues, conditions) and bind it to
    // the decoder through Config.
    videoDecContext_ = new CodecUserData;
    result = videoDecoder_->Config(sampleInfo_, videoDecContext_);
    CHECK_AND_RETURN_RET_LOG(result == AVCODEC_SAMPLE_ERR_OK, result, "Decoder config failed");
    return AVCODEC_SAMPLE_ERR_OK;
}

int32_t Player::CreateAudioDecoder()
{
    // Best-effort creation of the audio decoder: when Create() fails this only
    // logs and still returns OK, leaving audioDecContext_ null so playback can
    // proceed video-only. Config failure, by contrast, is propagated.
    // NOTE(review): Init() subsequently passes the (possibly null)
    // audioDecContext_ to AudioRendererInit — confirm that is tolerated.
    AVCODEC_SAMPLE_LOGW("audio mime:%{public}s", sampleInfo_.audioCodecMime.c_str());
    int32_t ret = audioDecoder_->Create(sampleInfo_.audioCodecMime);
    if (ret != AVCODEC_SAMPLE_ERR_OK) {
        AVCODEC_SAMPLE_LOGE("Create audio decoder failed, mime:%{public}s", sampleInfo_.audioCodecMime.c_str());
    } else {
        audioDecContext_ = new CodecUserData;
        ret = audioDecoder_->Config(sampleInfo_, audioDecContext_);
        CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Audio Decoder config failed");
    }
    return AVCODEC_SAMPLE_ERR_OK;
}
