#include "Hdr2Sdr.h"
#include <unistd.h>

#undef LOG_TAG
#define LOG_TAG "HDR2SDR"


namespace {
// Throttle bound for the audio render queue: the output thread waits until the
// queue holds fewer than BALANCE_VALUE output-buffers' worth of PCM bytes.
constexpr int BALANCE_VALUE = 5;  
// Enables the 5s / 20ms duration literals used by the worker threads.
using namespace std::chrono_literals;
}

// Number of instances getInstance() pre-creates on first call; adjustable via SetMaxNum().
int32_t Hdr2Sdr::maxNumOfInstance = 3;
// Lazily-populated pool of instances, indexed by id. Owned raw pointers.
std::vector<Hdr2Sdr *> Hdr2Sdr::multiSurfaceList;

// Constructs one pool entry; `id` is the slot index assigned by getInstance().
Hdr2Sdr::Hdr2Sdr(int32_t id) : id(id) {
    AVCODEC_SAMPLE_LOGI("Sample id is %{public}i", id + 1);
}

// Tears down decoder/demuxer resources and worker threads on destruction.
Hdr2Sdr::~Hdr2Sdr() {StartRelease();}

/**
 * @brief Returns the pooled instance for `id`, lazily creating the pool on
 *        first call (maxNumOfInstance entries).
 * @param id slot index; returns nullptr when out of range.
 * NOTE(review): not thread-safe — concurrent first calls may double-populate
 * the list; confirm all callers run on one thread.
 */
Hdr2Sdr *Hdr2Sdr::getInstance(int32_t id) {
    if (multiSurfaceList.empty()) {
        for (int32_t i = 0; i < maxNumOfInstance; ++i) {
            multiSurfaceList.emplace_back(new Hdr2Sdr(i));
        }
    }
    // Bound by the actual pool size, not maxNumOfInstance: SetMaxNum() may
    // have raised the latter after the pool was built, and at() would throw.
    if (id > -1 && id < static_cast<int32_t>(multiSurfaceList.size())) {
        return multiSurfaceList.at(id);
    }
    return nullptr;
}

void Hdr2Sdr::releaseInstance() { multiSurfaceList.clear(); }

void Hdr2Sdr::SetMaxNum(int32_t num) { Hdr2Sdr::maxNumOfInstance = num; }

/**
 * @brief Creates/configures the audio decoder and builds an OH_AudioRenderer
 *        fed by SampleCallback::OnRenderWriteData.
 * @return AVCODEC_SAMPLE_ERR_OK in all paths except a Config() failure.
 * NOTE(review): a Create() failure is only logged and ERR_OK is still
 * returned — presumably to allow video-only playback; confirm intent.
 */
int32_t Hdr2Sdr::CreateAudioDecoder()
{
    AVCODEC_SAMPLE_LOGW("audio mime:%{public}s", sampleInfo_.audioCodecMime.c_str());
    int32_t ret = audioDecoder_->Create(sampleInfo_.audioCodecMime);
    if (ret != AVCODEC_SAMPLE_ERR_OK) {
        AVCODEC_SAMPLE_LOGE("Create audio decoder failed, mime:%{public}s", sampleInfo_.audioCodecMime.c_str());
    } else {
        audioDecContext_ = new CodecUserData;
        ret = audioDecoder_->Config(sampleInfo_, audioDecContext_);
        CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Audio Decoder config failed");
        OH_AudioStreamBuilder_Create(&builder_, AUDIOSTREAM_TYPE_RENDERER);
        OH_AudioStreamBuilder_SetLatencyMode(builder_, AUDIOSTREAM_LATENCY_MODE_NORMAL);
        // Set the audio sample rate
        OH_AudioStreamBuilder_SetSamplingRate(builder_, sampleInfo_.audioSampleRate);
        // Set the audio channel count
        OH_AudioStreamBuilder_SetChannelCount(builder_, sampleInfo_.audioChannelCount);
        // Set the audio sample format (16-bit little-endian PCM)
        OH_AudioStreamBuilder_SetSampleFormat(builder_, AUDIOSTREAM_SAMPLE_S16LE);
        // Set the encoding type of the audio stream (raw PCM)
        OH_AudioStreamBuilder_SetEncodingType(builder_, AUDIOSTREAM_ENCODING_TYPE_RAW);
        // Set the usage scenario of the output audio stream
        OH_AudioStreamBuilder_SetRendererInfo(builder_, AUDIOSTREAM_USAGE_MUSIC);
        AVCODEC_SAMPLE_LOGW("Init audioSampleRate: %{public}d, ChannelCount: %{public}d", sampleInfo_.audioSampleRate,
                            sampleInfo_.audioChannelCount);
        OH_AudioRenderer_Callbacks callbacks;
        // Configure the renderer callbacks (write-data callback disabled in debug builds)
#ifndef DEBUG_DECODE
        callbacks.OH_AudioRenderer_OnWriteData = SampleCallback::OnRenderWriteData;
#else
        callbacks.OH_AudioRenderer_OnWriteData = nullptr;
#endif
        callbacks.OH_AudioRenderer_OnStreamEvent = SampleCallback::OnRenderStreamEvent;
        callbacks.OH_AudioRenderer_OnInterruptEvent = SampleCallback::OnRenderInterruptEvent;
        callbacks.OH_AudioRenderer_OnError = SampleCallback::OnRenderError;
        // Register the callbacks on the output audio stream; audioDecContext_ is the user data.
        OH_AudioStreamBuilder_SetRendererCallback(builder_, callbacks, audioDecContext_);
        OH_AudioStreamBuilder_GenerateRenderer(builder_, &audioRenderer_);
    }
    return AVCODEC_SAMPLE_ERR_OK;
}

int32_t Hdr2Sdr::CreateVideoDecoder()
{
    AVCODEC_SAMPLE_LOGW("video mime:%{public}s", sampleInfo_.videoCodecMime.c_str());
    int32_t ret = videoDecoder_->Create(sampleInfo_.videoCodecMime);
    if (ret != AVCODEC_SAMPLE_ERR_OK) {
        AVCODEC_SAMPLE_LOGW("Create video decoder failed, mime:%{public}s", sampleInfo_.videoCodecMime.c_str());
    } else {
        videoDecContext_ = new CodecUserData;
        ret = videoDecoder_->Config(sampleInfo_, videoDecContext_);
        CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Video Decoder config failed");
    }
    return AVCODEC_SAMPLE_ERR_OK;
}

/**
 * @brief One-time pipeline setup: creates the demuxer and the video decoder,
 *        then echoes the probed stream properties back through `sampleInfo`.
 * @param sampleInfo in: source description; out: duration, width, height.
 * @return AVCODEC_SAMPLE_ERR_OK on success; an error code otherwise.
 * NOTE(review): the audio branch (decoder + renderer) is deliberately
 * disabled below — this build is video-only.
 */
int32_t Hdr2Sdr::Init(SampleInfo &sampleInfo) {
    std::lock_guard<std::mutex> lock(mutex_);
    CHECK_AND_RETURN_RET_LOG(!isStarted, AVCODEC_SAMPLE_ERR_ERROR, "Already started.");
    CHECK_AND_RETURN_RET_LOG(demuxer_ == nullptr && videoDecoder_ == nullptr && audioDecoder_ == nullptr,
        AVCODEC_SAMPLE_ERR_ERROR, "Already started.");

    sampleInfo_ = sampleInfo;
    
    videoDecoder_ = std::make_unique<VideoDecoder>();
//     audioDecoder_ = std::make_unique<AudioDecoder>();
    demuxer_ = std::make_unique<Demuxer>();

    int32_t ret = demuxer_->Create(sampleInfo_);
    CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Create demuxer failed");
    
    // Report the values the demuxer discovered back to the caller.
    sampleInfo.durationTime = sampleInfo_.durationTime;
    sampleInfo.videoWidth = sampleInfo_.videoWidth;
    sampleInfo.videoHeight = sampleInfo_.videoHeight;
    
//     ret = CreateAudioDecoder();
//     CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Create audio decoder failed");
    
    ret = CreateVideoDecoder();
    CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Create video decoder failed");

    releaseThread_ = nullptr;
    AVCODEC_SAMPLE_LOGI("Succeed");
    return AVCODEC_SAMPLE_ERR_OK;
}

/**
 * @brief Starts the video decoder and spawns the demux-input / render-output
 *        worker threads. The audio pipeline (decoder threads + renderer) is
 *        disabled, matching the commented-out creation path in Init().
 * @return AVCODEC_SAMPLE_ERR_OK on success; an error code otherwise.
 *
 * Fixes vs. original:
 *  - the second CHECK's "Already started." message was wrong for a null
 *    demuxer; it now says what actually failed;
 *  - added a null guard for videoDecoder_ (Create failure / not initialized);
 *  - removed the post-make_unique null check: std::make_unique throws
 *    std::bad_alloc on failure and never returns nullptr, so it was dead code.
 */
int32_t Hdr2Sdr::Start() {
    std::lock_guard<std::mutex> lock(mutex_);
    CHECK_AND_RETURN_RET_LOG(!isStarted, AVCODEC_SAMPLE_ERR_ERROR, "Already started.");
    CHECK_AND_RETURN_RET_LOG(demuxer_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Demuxer is null.");
    CHECK_AND_RETURN_RET_LOG(videoDecoder_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Video decoder is null.");

    int32_t ret = videoDecoder_->Start();
    CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, ret, "Video Decoder start failed");
    isStarted = true;
    isPause = false;
    videoDecInputThread_ = std::make_unique<std::thread>(&Hdr2Sdr::VideoDecInputThread, this);
    videoDecOutputThread_ = std::make_unique<std::thread>(&Hdr2Sdr::VideoDecOutputThread, this);

    AVCODEC_SAMPLE_LOGI("Succeed");
    doneCond_.notify_all();
    return AVCODEC_SAMPLE_ERR_OK;
}

// Teardown entry point, reached from Stop(), the destructor, and the output
// threads on EOS. The original asynchronous path (stop the audio renderer,
// then run Release() on a dedicated thread) is disabled below; Release()
// now runs inline on the caller's thread.
void Hdr2Sdr::StartRelease() {
//     if (audioRenderer_) {
//         OH_AudioRenderer_Stop(audioRenderer_);
//     }
//     if (releaseThread_ == nullptr) {
//         AVCODEC_SAMPLE_LOGI("Start to release");
//         releaseThread_ = std::make_unique<std::thread>(&Hdr2Sdr::Release, this);
//     }
    Release();
}

// Detaches and drops both video worker threads if they are still joinable.
// (Detach, not join: this may be invoked from a worker's own call chain.)
void Hdr2Sdr::ReleaseThread()
{
    auto dropWorker = [](std::unique_ptr<std::thread> &worker) {
        if (worker && worker->joinable()) {
            worker->detach();
            worker.reset();
        }
    };
    dropWorker(videoDecInputThread_);
    dropWorker(videoDecOutputThread_);
}

// Tears down the whole pipeline under mutex_: stops the run flag, detaches the
// worker threads, and frees the video decoder, its user context and the
// demuxer, in that order. Audio teardown is disabled (commented out).
void Hdr2Sdr::Release() {
    std::lock_guard<std::mutex> lock(mutex_);
    isStarted = false;

//     // 清空队列
//     while (audioDecContext_ && !audioDecContext_->renderQueue.empty()) {
//         audioDecContext_->renderQueue.pop();
//     }
//     if (audioRenderer_ != nullptr) {
//         OH_AudioRenderer_Release(audioRenderer_);
//         audioRenderer_ = nullptr;
//     }
// #ifdef DEBUG_DECODE
//     if (audioOutputFile_.is_open()) {
//         audioOutputFile_.close();
//     }
// #endif
//     ReleaseThread();
    
    // Detach rather than join: Release() can be reached from the output
    // thread's own call chain (VideoDecOutputThread -> StartRelease), and a
    // join there would self-deadlock.
    if (videoDecInputThread_ && videoDecInputThread_->joinable()) {
        videoDecInputThread_->detach();
        videoDecInputThread_.reset();
    }
    if (videoDecOutputThread_ && videoDecOutputThread_->joinable()) {
        videoDecOutputThread_->detach();
        videoDecOutputThread_.reset();
    }
    if (videoDecoder_ != nullptr) {
        videoDecoder_->Release();
        videoDecoder_.reset();
    }
//     if (sampleInfo_.window != nullptr) {
//         OH_NativeWindow_DestroyNativeWindow(sampleInfo_.window);
//         sampleInfo_.window = nullptr;
//     }
    // NOTE(review): detached threads may still be running and touching
    // videoDecContext_ when it is deleted here — potential use-after-free;
    // confirm the threads have observed isStarted == false before this point.
    if (videoDecContext_ != nullptr) {
        delete videoDecContext_;
        videoDecContext_ = nullptr;
    }
    if (demuxer_ != nullptr) {
        demuxer_->Release();
        demuxer_.reset();
    }
//     if (audioDecoder_ != nullptr) {
//         audioDecoder_->Release();
//         audioDecoder_.reset();
//     }
//     if (audioDecContext_ != nullptr) {
//         delete audioDecContext_;
//         audioDecContext_ = nullptr;
//     }
//     OH_AudioStreamBuilder_Destroy(builder_);
    // Wake anyone blocked in WaitForDone().
    doneCond_.notify_all();
    AVCODEC_SAMPLE_LOGI("Succeed");
}

// Worker: pulls free input buffers from the decoder's queue, fills them with
// video samples from the demuxer, and pushes them back to the decoder.
// Exits on stop, read/push failure, or after pushing an EOS frame
// (unless isLoop rewinds the demuxer first).
void Hdr2Sdr::VideoDecInputThread() {
    while (true) {
        CHECK_AND_BREAK_LOG(isStarted, "Decoder input thread out");
        std::unique_lock<std::mutex> lock((videoDecContext_->inputMutex));
        // Wait up to 5s for a buffer (or stop); paused threads keep waiting.
        bool condRet = videoDecContext_->inputCond.wait_for(
            lock, 5s, [this]() { return !isPause.load() && (!isStarted || !videoDecContext_->inputBufferInfoQueue.empty()); });
        CHECK_AND_BREAK_LOG(isStarted, "Work done, thread out");
        CHECK_AND_CONTINUE_LOG(!videoDecContext_->inputBufferInfoQueue.empty(),
                               "Buffer queue is empty, continue, cond ret: %{public}d", condRet);

        CodecBufferInfo bufferInfo = videoDecContext_->inputBufferInfoQueue.front();
        videoDecContext_->inputBufferInfoQueue.pop();
        videoDecContext_->inputFrameCount++;
        lock.unlock();

        int32_t ret = demuxer_->ReadSample(demuxer_->GetVideoTrackId(), reinterpret_cast<OH_AVBuffer *>(bufferInfo.buffer),
            bufferInfo.attr);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "ReadSample failed, thread out");

        // Loop mode: swallow the EOS flag and rewind so playback restarts.
        // NOTE(review): the inner `ret` shadows the outer one — harmless here,
        // since the Seek result is checked before the shadow goes out of scope.
        if ((bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_EOS) && isLoop) {
            bufferInfo.attr.flags = AVCODEC_BUFFER_FLAGS_NONE;
            int32_t ret = demuxer_->Seek(0);
            CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Loop failed, thread out");
        }

        ret = videoDecoder_->PushInputBuffer(bufferInfo);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Push data failed, thread out");
        CHECK_AND_BREAK_LOG(!(bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_EOS), "Push EOS frame, thread out");
    }
    AVCODEC_SAMPLE_LOGI("Exit, frame count: %{public}u", videoDecContext_->inputFrameCount);
}

// Worker: drains decoded frames from the output queue, releases each buffer
// back to the decoder for rendering, and paces output to the stream's frame
// rate. Exits on stop or EOS, then triggers full teardown via StartRelease().
void Hdr2Sdr::VideoDecOutputThread() {
    // Microseconds per frame, used below to pace FreeOutputBuffer() calls.
    sampleInfo_.frameInterval = MICROSECOND / sampleInfo_.frameRate;
    while (true) {
        // thread_local => initialized once per thread, not once per iteration.
        thread_local auto lastPushTime = std::chrono::system_clock::now();
        CHECK_AND_BREAK_LOG(isStarted, "Decoder output thread out");
        std::unique_lock<std::mutex> lock(videoDecContext_->outputMutex);
        bool condRet = videoDecContext_->outputCond.wait_for(lock, 5s, [this]() {
            return !isPause.load() && (!isStarted || !videoDecContext_->outputBufferInfoQueue.empty()); });
        CHECK_AND_BREAK_LOG(isStarted, "Decoder output thread out");
        CHECK_AND_CONTINUE_LOG(!videoDecContext_->outputBufferInfoQueue.empty(),
                               "Buffer queue is empty, continue, cond ret: %{public}d", condRet);

        CodecBufferInfo bufferInfo = videoDecContext_->outputBufferInfoQueue.front();
        videoDecContext_->outputBufferInfoQueue.pop();
        CHECK_AND_BREAK_LOG(!(bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_EOS), "Catch EOS, thread out");
        videoDecContext_->outputFrameCount++;
        AVCODEC_SAMPLE_LOGW("Out buffer count: %{public}u, size: %{public}d, flag: %{public}u, pts: %{public}" PRId64,
                            videoDecContext_->outputFrameCount, bufferInfo.attr.size, bufferInfo.attr.flags,
                            bufferInfo.attr.pts);
        lock.unlock();

        // Disabled experiment: alternate the render surface once per second of
        // frames (kept for reference).
//         if (autoSwitchSurface && (videoDecContext_->outputFrameCount % (int32_t)sampleInfo_.frameRate == 0)) {
//             switchSurfaceFlag = (switchSurfaceFlag == 1) ? 0 : 1;
//             AVCODEC_SAMPLE_LOGI("surface change %{public}i", switchSurfaceFlag);
//             if (switchSurfaceFlag) {
//                 switch (sampleId) {
//                     case 1 : {
//                         SetSurface("Surface1_2");
//                         break;
//                     }
//                     case 2 : {
//                         SetSurface("Surface1_1");
//                         break;
//                     }
//                     default: {
//                         AVCODEC_SAMPLE_LOGI("sample id error %{public}i", sampleId);
//                         break;
//                     }
//                 }
//             } else {
//                 switch (sampleId) {
//                     case 1 : {
//                         SetSurface("Surface1_1");
//                         break;
//                     }
//                     case 2 : {
//                         SetSurface("Surface1_2");
//                         break;
//                     }
//                     default: {
//                         AVCODEC_SAMPLE_LOGI("sample id error %{public}i", sampleId);
//                         break;
//                     }
//                 }
//             }
//         }

        // `true` asks the decoder to render the frame as it frees the buffer.
        int32_t ret = videoDecoder_->FreeOutputBuffer(bufferInfo.bufferIndex, true);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Decoder output thread out");
        
        // Pace to the nominal frame rate relative to the previous frame.
        std::this_thread::sleep_until(lastPushTime + std::chrono::microseconds(sampleInfo_.frameInterval));
        lastPushTime = std::chrono::system_clock::now();
    }
    AVCODEC_SAMPLE_LOGI("Exit, frame count: %{public}u", videoDecContext_->outputFrameCount);
    StartRelease();
}

/**
 * @brief Worker: fills free input buffers with audio samples from the demuxer
 *        and pushes them to the audio decoder. Exits on stop, read/push
 *        failure, or EOS. (Currently unused — audio path disabled in Init().)
 *
 * Fix: the original ignored ReadSample()'s return value (the video input
 * thread checks it), so a demux failure would push a stale/uninitialized
 * buffer into the decoder. The result is now checked.
 */
void Hdr2Sdr::AudioDecInputThread()
{
    while (true) {
        CHECK_AND_BREAK_LOG(isStarted, "Decoder input thread out");
        std::unique_lock<std::mutex> lock(audioDecContext_->inputMutex);
        // Wait up to 5s for a buffer (or stop); paused threads keep waiting.
        bool condRet = audioDecContext_->inputCond.wait_for(
            lock, 5s, [this]() { return !isPause.load() && (!isStarted || !audioDecContext_->inputBufferInfoQueue.empty()); });
        CHECK_AND_BREAK_LOG(isStarted, "Work done, thread out");
        CHECK_AND_CONTINUE_LOG(!audioDecContext_->inputBufferInfoQueue.empty(),
                               "Buffer queue is empty, continue, cond ret: %{public}d", condRet);

        CodecBufferInfo bufferInfo = audioDecContext_->inputBufferInfoQueue.front();
        audioDecContext_->inputBufferInfoQueue.pop();
        audioDecContext_->inputFrameCount++;
        lock.unlock();

        int32_t ret = demuxer_->ReadSample(demuxer_->GetAudioTrackId(),
            reinterpret_cast<OH_AVBuffer *>(bufferInfo.buffer), bufferInfo.attr);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "ReadSample failed, thread out");

        ret = audioDecoder_->PushInputBuffer(bufferInfo);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Push data failed, thread out");

        CHECK_AND_BREAK_LOG(!(bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_EOS), "Catch EOS, thread out");
    }
}

// Worker: drains decoded PCM from the audio decoder's output queue into
// renderQueue (consumed by the OH_AudioRenderer write callback), then frees
// the buffer and throttles when the queue is full. Exits on stop or EOS and
// triggers teardown. (Currently unused — audio path disabled in Init().)
void Hdr2Sdr::AudioDecOutputThread()
{
    while (true) {
        CHECK_AND_BREAK_LOG(isStarted, "Decoder output thread out");
        std::unique_lock<std::mutex> lock(audioDecContext_->outputMutex);
        bool condRet = audioDecContext_->outputCond.wait_for(
            lock, 5s, [this]() { return !isPause.load() && (!isStarted || !audioDecContext_->outputBufferInfoQueue.empty()); });
        CHECK_AND_BREAK_LOG(isStarted, "Decoder output thread out");
        CHECK_AND_CONTINUE_LOG(!audioDecContext_->outputBufferInfoQueue.empty(),
                               "Buffer queue is empty, continue, cond ret: %{public}d", condRet);

        CodecBufferInfo bufferInfo = audioDecContext_->outputBufferInfoQueue.front();
        audioDecContext_->outputBufferInfoQueue.pop();
        CHECK_AND_BREAK_LOG(!(bufferInfo.attr.flags & AVCODEC_BUFFER_FLAGS_EOS), "Catch EOS, thread out");
        audioDecContext_->outputFrameCount++;
        AVCODEC_SAMPLE_LOGW("Out buffer count: %{public}u, size: %{public}d, flag: %{public}u, pts: %{public}" PRId64,
                            audioDecContext_->outputFrameCount, bufferInfo.attr.size, bufferInfo.attr.flags,
                            bufferInfo.attr.pts);
        uint8_t *source = OH_AVBuffer_GetAddr(reinterpret_cast<OH_AVBuffer *>(bufferInfo.buffer));
        // Push the decoded PCM bytes into the render queue.
        // NOTE(review): byte-by-byte push is O(size) queue operations per
        // buffer — a bulk insert would be cheaper; behavior kept as-is.
        for (int i = 0; i < bufferInfo.attr.size; i++) {
            audioDecContext_->renderQueue.push(*(source + i));
        }
#ifdef DEBUG_DECODE
        if (audioOutputFile_.is_open()) {
            audioOutputFile_.write((const char*)OH_AVBuffer_GetAddr(reinterpret_cast<OH_AVBuffer *>(bufferInfo.buffer)),
                bufferInfo.attr.size);
        }
#endif
        lock.unlock();

        // `true` releases the buffer back to the codec (audio has no surface).
        int32_t ret = audioDecoder_->FreeOutputBuffer(bufferInfo.bufferIndex, true);
        CHECK_AND_BREAK_LOG(ret == AVCODEC_SAMPLE_ERR_OK, "Decoder output thread out");

        // Backpressure: wait (max 20ms) until the renderer has drained the
        // queue below BALANCE_VALUE output-buffers' worth of data.
        std::unique_lock<std::mutex> lockRender(audioDecContext_->renderMutex);
        audioDecContext_->renderCond.wait_for(lockRender, 20ms,
            [this, bufferInfo]() {
                return audioDecContext_->renderQueue.size() < BALANCE_VALUE * bufferInfo.attr.size;
            });
    }
    AVCODEC_SAMPLE_LOGI("Out buffer end");
    StartRelease();
}

int32_t Hdr2Sdr::Stop() {
    Hdr2Sdr::StartRelease();
    return WaitForDone();
}

// Pauses playback: the worker threads poll this flag in their wait predicates.
int32_t Hdr2Sdr::Pause() {
    isPause = true;
    return AVCODEC_SAMPLE_ERR_OK;
}

/**
 * @brief Resumes playback: clears the pause flag and wakes both video worker
 *        threads so their wait predicates re-evaluate.
 * @return AVCODEC_SAMPLE_ERR_OK always.
 *
 * Fix: guard against a null videoDecContext_ — the original dereferenced it
 * unconditionally and would crash if Resume() was called before Init()
 * succeeded or after Release().
 */
int32_t Hdr2Sdr::Resume() {
    isPause.store(false);
    if (videoDecContext_ != nullptr) {
        videoDecContext_->inputCond.notify_all();
        videoDecContext_->outputCond.notify_all();
    }
    return AVCODEC_SAMPLE_ERR_OK;
}

/**
 * @brief Seeks the demuxer to `currentTime` (converted to milliseconds via
 *        the MILLISECONDS scale factor).
 * @return AVCODEC_SAMPLE_ERR_OK on success, AVCODEC_SAMPLE_ERR_ERROR otherwise.
 *
 * Fix: guard against a null demuxer_ — the original dereferenced it
 * unconditionally and would crash when called before Init()/after Release().
 */
int32_t Hdr2Sdr::VideoSeek(int64_t currentTime) {
    CHECK_AND_RETURN_RET_LOG(demuxer_ != nullptr, AVCODEC_SAMPLE_ERR_ERROR, "Demuxer is null");
    int64_t milliseconds = currentTime * MILLISECONDS;
    int32_t ret = demuxer_->Seek(milliseconds);
    CHECK_AND_RETURN_RET_LOG(ret == AVCODEC_SAMPLE_ERR_OK, AVCODEC_SAMPLE_ERR_ERROR, "video seek failed");
    return AVCODEC_SAMPLE_ERR_OK;
}

// Enables/disables loop playback; the video input thread rewinds on EOS when set.
int32_t Hdr2Sdr::Loop(bool loopFlag) {
    isLoop.store(loopFlag);
    if (isLoop) {
        AVCODEC_SAMPLE_LOGI("isLoop is true");
    } else {
        AVCODEC_SAMPLE_LOGI("isLoop is false");
    }
    return AVCODEC_SAMPLE_ERR_OK;
}

// Binds the decoder's output to the named XComponent surface.
// NOTE(review): map::at() throws std::out_of_range for an unknown id, and
// videoDecoder_ is dereferenced without a null check — confirm callers only
// pass registered surface ids after Init() succeeded.
int32_t Hdr2Sdr::SetSurface(const std::string id) {
    videoDecoder_->SetSurface(NativeXComponentSample::PluginManager::GetInstance()->multiWindowsMap.at(id));
    return AVCODEC_SAMPLE_ERR_OK;
}

// Toggles the (currently disabled) automatic surface-switching experiment.
int32_t Hdr2Sdr::SetSurfaceNative(bool isAuto) {
    autoSwitchSurface.store(isAuto);
    return AVCODEC_SAMPLE_ERR_OK;
}

int32_t Hdr2Sdr::WaitForDone() {
    std::unique_lock<std::mutex> lock(mutex_);
    doneCond_.wait(lock);
    if (releaseThread_ && releaseThread_->joinable()) {
        releaseThread_->join();
        releaseThread_.reset();
    }
    AVCODEC_SAMPLE_LOGI("Done");
    return AVCODEC_SAMPLE_ERR_OK;
}

