/*
* Copyright (C) 2020 ~ 2024 chaigec All Rights Reserved.
*
* Author:     chaigec <chaigec@163.com>
 * Maintainer: chaigec <chaigec@163.com>
 *
 * Use of this source code is governed by MIT license that can be found in the
 * LICENSE file in the root of the source tree. All contributing project authors
 * may be found in the AUTHORS file in the root of the source tree.
*/
#include "MkFileSource.h"
#include "MkEvent/MkTimer.h"
#include "MkUtil/MkLog.h"

/**
 * @brief Construct a file-backed media source.
 * @param SourceType concrete source kind (H.264 / H.265 / AAC file)
 * @param fp         open file handle; ownership is taken (closed in StopMediaSource)
 * @param pTimer     timer used to pace frame delivery (non-owning)
 *
 * Allocates a Len2M-byte buffer shared by ScanFile() and GetNextFrame().
 * NOTE(review): the malloc result is not checked — a failed allocation leaves
 * m_pBuf null and the later fread calls would write through it; confirm
 * allocation failure is acceptable here or add a check.
 */
CMkFileSource::CMkFileSource(const MkMediaSourceType& SourceType, FILE *fp, CMkTimer* pTimer)
    : CMkMediaSource(SourceType)
    , m_pFp(fp)
    , m_pTimer(pTimer)
    , m_MaxLen(Len2M)
    , m_nOffset(0)
    , m_pBuf(nullptr)
    , m_ReadFrameIndex(0)
    , m_Pts(0)
    , m_nSampleRate(8000)  // default until an AAC ADTS header is parsed in ScanFile()
{
    m_pBuf = (Uint8*)malloc(m_MaxLen);
}

/**
 * @brief Start delivering frames from the file.
 *
 * Indexes the file into m_VecPacket on first use, derives the stream
 * duration and the delivery interval from the video frame rate (video
 * streams) or the sample rate (audio streams), then registers a periodic
 * timer whose tick pulls one frame via GetNextFrame() and forwards each
 * resulting packet through OnReadEsPacket().
 *
 * @param ResultCb callback invoked with NoneError once the source is started
 * @param EsCb     per-packet ES callback forwarded to the base class
 * @return the timer-registration result, or NoneError when no timer is set
 */
Uint32 CMkFileSource::StartMediaSource(const MkSourceResultCallback& ResultCb, const MkEsPacketCallback& EsCb/* = nullptr*/)
{
    // Scan/index the file only once; repeated starts reuse the packet table.
    if (m_VecPacket.empty()) {
        ScanFile();
    }
    CMkMediaSource::StartMediaSource(ResultCb, EsCb);
    Uint32 TimerInterval = 0;
    if (MkCodecUnknown != GetVideoParam().GetCodec()) {
        if (GetVideoParam().GetFrameRate()) {
            m_Duration = m_VecPacket.size() / GetVideoParam().GetFrameRate();
            TimerInterval = 1000 / GetVideoParam().GetFrameRate();
        }
    } else if (MkCodecUnknown != GetAudioParam().GetCodec()) {
        // Duration in ms-scaled units, rounded to nearest.
        float Tmp = (float)1000 * m_VecPacket.size();
        Tmp /= m_nSampleRate;
        Tmp += 0.5;
        m_Duration = static_cast<Uint32>(Tmp);
        TimerInterval = 1000 * 1000 / m_nSampleRate;
    }

    fseek(m_pFp, 0, SEEK_END);
    m_FileSize = ftell(m_pFp);
    // BUGFIX: was fseek(..., 0, SEEK_CUR), which is a no-op after seeking to
    // the end. Rewind to the start so the position is well-defined before the
    // first frame read.
    fseek(m_pFp, 0, SEEK_SET);
    m_ReadFrameIndex = 0;
    m_Pts = 0;
    if (m_ResultCb) {
        m_ResultCb(this, NoneError);
    }
    if (m_pTimer) {
#ifdef MkUt
        TimerInterval = 0;  // unit-test build: deliver frames without pacing
#endif
        return m_pTimer->AddTimer([this]() {
            if (IsPause()) {
                return;
            }
            MkVector<MkEsPacket> VecEsPacket;
            GetNextFrame(VecEsPacket);
            MkVector<MkEsPacket>::iterator it = VecEsPacket.begin();
            for (; it != VecEsPacket.end(); it++) {
                OnReadEsPacket(*it);
            }
        }, TimerInterval, m_FrameGetTimer);
    }
    return NoneError;
}

/**
 * @brief Stop frame delivery and release the source's resources.
 *
 * Removes the frame-pacing timer, schedules `delete this` on the timer
 * thread (so destruction happens off the caller's stack), then frees the
 * read buffer, closes the file handle, and nulls m_pTimer so a second call
 * (e.g. from the destructor) skips the timer work.
 *
 * NOTE(review): if this object is destroyed externally with `delete`, the
 * destructor calls this method and the delayed `delete this` would then
 * double-delete — confirm destruction only ever happens via this delayed
 * path.
 * NOTE(review): m_pBuf is freed immediately while the timer thread may still
 * be running a previously queued tick that reads it — verify DeleteTimer
 * synchronizes with in-flight callbacks.
 */
Uint32 CMkFileSource::StopMediaSource()
{
    CMkMediaSource::StopMediaSource();
    if (m_pTimer) {
        m_pTimer->DeleteTimer(m_FrameGetTimer);
        //use timer thread to delete file source
        m_pTimer->AddDelayer([this]() {
            delete this;
        }, 0, m_FrameGetTimer);
    }
    MkFree(m_pBuf);
    MkCloseFile(m_pFp);
    m_pTimer = nullptr;
    return NoneError;
}

/**
 * @brief Handle a playback-control request (speed change or seek).
 * @param ControlType MkControlSetSpeed or MkControlSeek
 * @param fSpeed      playback speed multiplier (used for speed changes)
 * @param offsetSec   seek target in seconds from the start of the file
 * @return result of the base-class PlayControl
 */
Uint32 CMkFileSource::PlayControl(const MkPlaybackControlType& ControlType, float fSpeed, Uint32 offsetSec)
{
    if (MkControlSetSpeed == ControlType) {
        // Re-pace the frame timer: interval = 1000ms / (fps * speed).
        // BUGFIX: guard against a zero frame rate or non-positive speed,
        // which previously divided by zero.
        if (m_pTimer && GetVideoParam().GetFrameRate() && fSpeed > 0.0f) {
            m_pTimer->ChangeTimerInterval(m_FrameGetTimer, static_cast<Uint32>(1000 / (GetVideoParam().GetFrameRate()*fSpeed)));
        }
    } else if (MkControlSeek == ControlType) {
        // Resume at the first key frame at or after the requested second so
        // decoding restarts without missing reference frames. If none is
        // found the read index is left unchanged.
        for (Uint32 i = offsetSec * GetVideoParam().GetFrameRate(); i < m_VecPacket.size(); i++) {
            if (m_VecPacket[i].bKeyFrame) {
                m_ReadFrameIndex = i;
                break;
            }
        }
    }
    return CMkMediaSource::PlayControl(ControlType, fSpeed, offsetSec);
}

// H.264 Annex-B elementary-stream file source; all work is in the base class.
CMkH264FileSource::CMkH264FileSource(FILE *fp, CMkTimer* pTimer)
    : CMkFileSource(MkMediaSourceFile264, fp, pTimer)
{
}

/**
 * @brief Index an H.264 Annex-B file into per-frame packet descriptors.
 *
 * Reads the file in m_MaxLen chunks, walks the NALUs with GetNextH26xNalu,
 * and records each NALU as a (file offset, length) pair. NALUs accumulate in
 * PacketParam until a slice NALU completes the frame, which is then pushed
 * into m_VecPacket. The first SPS/PPS encountered are captured and the SPS is
 * parsed for width/height/frame-rate; the resulting CMkVideoParam is stored
 * via SetVideoParam().
 *
 * NOTE(review): the trailing NALU is only flushed in the short-read branch
 * (nRead != nToRead); if the final fread fills the buffer exactly, the next
 * read returns 0 and any unconsumed tail appears to be dropped — confirm.
 *
 * @return NoneError always
 */
Uint32 CMkH264FileSource::ScanFile()
{
    MkString Sps;
    MkString Pps;
    Uint32 nWidth = 0;
    Uint32 nHeight = 0;
    Uint32 nFrameRate = 0;
    Uint32 nTotalOffset = 0;  // file offset of m_pBuf[0] in the current window
    //Uint32 Pts = 0;
    while (TRUE) {
        Uint32 nToRead = m_MaxLen - m_nOffset;
        int nRead = fread(m_pBuf + m_nOffset, 1, nToRead, m_pFp);
        if (nRead <= 0) {
            break;
        }
        m_nOffset += nRead;
        const Uint8* pLastNaluEnd = nullptr;
        const Uint8* pNextNaluHeader = nullptr;
        const Uint8* pOffset = m_pBuf;
        Uint8 NaluType = 0;
        Uint32 NaluLen = 0;
        MkFilePacketParam PacketParam;
        while (NoneError == CMkMediaUtil::GetNextH26xNalu(pOffset, m_pBuf + m_nOffset, &pLastNaluEnd, &pNextNaluHeader)) {
            //first packet
            // Skip the leading start code at the very beginning of the buffer.
            if (pLastNaluEnd == m_pBuf) {
                pOffset = pNextNaluHeader;
                continue;
            }
            CMkMediaUtil::Get264NaluType(*pOffset, NaluType);
            NaluLen = pLastNaluEnd - pOffset;
            // Record this NALU's absolute file position and length.
            PacketParam.VecPacket.push_back(std::pair<Uint32, Uint32>(nTotalOffset + pOffset - m_pBuf, NaluLen));
            if (MkH264NalSps == NaluType && Sps.empty()) {
                Sps = MkString((Int8*)pOffset, NaluLen);
                CMkMediaUtil::H264SpsParse(pOffset, NaluLen, nWidth, nHeight, nFrameRate);
            } else if (MkH264NalPps == NaluType && Pps.empty()) {
                Pps = MkString((Int8*)pOffset, NaluLen);
            } else if (MkH264NalSlice == NaluType || MkH264NalIdrSlice == NaluType) {
                // A slice NALU completes the current frame.
                PacketParam.bKeyFrame = MkH264NalIdrSlice == NaluType;
                //PacketParam.Dts = PacketParam.Dts = Pts;
                PacketParam.CodecType = MkCodecH264;
                m_VecPacket.push_back(PacketParam);
                PacketParam.VecPacket.clear();
                //Pts += 1000 / nFrameRate;
            }
            pOffset = pNextNaluHeader;
        }
        if (nRead != nToRead) {
            // Short read means end of file: flush the final NALU as one frame.
            CMkMediaUtil::Get264NaluType(*pOffset, NaluType);
            PacketParam.bKeyFrame = MkH264NalIdrSlice == NaluType;
            PacketParam.CodecType = MkCodecH264;
            PacketParam.VecPacket.push_back(std::pair<Uint32, Uint32>(nTotalOffset + pOffset - m_pBuf, m_nOffset - (pOffset - m_pBuf)));
            m_VecPacket.push_back(PacketParam);
            break;
        }
        // Slide the unconsumed tail to the front of the buffer and continue.
        nTotalOffset += pOffset - m_pBuf;
        m_nOffset -= pOffset - m_pBuf;
        memmove(m_pBuf, pOffset, m_nOffset);
    }
    CMkVideoParam Param(MkCodecH264, nWidth, nHeight, nFrameRate, 0);
    Param.SetSps(Sps);
    Param.SetPps(Pps);
    SetVideoParam(Param);
    return NoneError;
}

/**
 * @brief Produce the next H.264 frame as an ES packet.
 *
 * Reads the NALUs of the frame at m_ReadFrameIndex back from the file
 * (using the offsets recorded by ScanFile) and appends them to one
 * MkEsPacket. At end of stream the pacing timer is removed and a final
 * empty packet flagged as key is emitted.
 *
 * @param VecEsPacket receives the produced packet
 * @return NoneError, or NotHasEnoughBuffer when a non-key frame is skipped
 *         during >= 8x fast playback
 */
Uint32 CMkH264FileSource::GetNextFrame(MkVector<MkEsPacket>& VecEsPacket)
{
    MkEsPacket EsPacket;
    EsPacket.Dts = EsPacket.Pts = m_Pts;
    EsPacket.CodecType = MkCodecH264;
    if (m_ReadFrameIndex >= m_VecPacket.size()) {
        // End of file: stop the timer and emit a final empty key packet.
        if (m_pTimer) {
            m_pTimer->DeleteTimer(m_FrameGetTimer);
        }
        EsPacket.bKeyPacket = TRUE;
    } else {
        MkFilePacketParam ReadPacketParam = m_VecPacket[m_ReadFrameIndex];
        m_ReadFrameIndex++;
        // Advance the timestamp by one frame interval scaled by play speed.
        // BUGFIX: guard the division — an unknown (zero) frame rate
        // previously divided by zero.
        float Denominator = GetVideoParam().GetFrameRate() * GetSpeed();
        if (Denominator > 0) {
            m_Pts += static_cast<Uint32>(1000 / Denominator);
        }
        // At 8x speed and above only key frames are forwarded.
        if (GetSpeed() >= 8.0 && !ReadPacketParam.bKeyFrame) {
            return NotHasEnoughBuffer;
        }
        EsPacket.bKeyPacket = ReadPacketParam.bKeyFrame;
        Uint32 nOffset = 0;
        for (size_t i = 0; i < ReadPacketParam.VecPacket.size(); i++) {
            // Each entry is a (file offset, length) pair for one NALU.
            fseek(m_pFp, ReadPacketParam.VecPacket[i].first, SEEK_SET);
            // NOTE(review): fread result unchecked; a short read would append
            // stale buffer bytes.
            fread(m_pBuf + nOffset, 1, ReadPacketParam.VecPacket[i].second, m_pFp);
            EsPacket.BufferList.Append(m_pBuf + nOffset, ReadPacketParam.VecPacket[i].second);
            nOffset += ReadPacketParam.VecPacket[i].second;
        }
    }
    VecEsPacket.emplace_back(EsPacket);
    return NoneError;
}

// H.265/HEVC Annex-B elementary-stream file source; all work is in the base class.
CMkH265FileSource::CMkH265FileSource(FILE *fp, CMkTimer* pTimer)
    : CMkFileSource(MkMediaSourceFileH265, fp, pTimer)
{
}

/**
 * @brief Index an H.265 Annex-B file into per-frame packet descriptors.
 *
 * Mirrors the H.264 scanner: reads the file in m_MaxLen chunks, records each
 * NALU as a (file offset, length) pair, and completes a frame on every slice
 * NALU (IDR_W_RADL / IDR_N_LP / CRA count as key frames). VPS/SPS/PPS are
 * captured and the SPS is parsed for width/height.
 *
 * NOTE(review): unlike the H.264 path, VPS/SPS/PPS are overwritten on every
 * occurrence rather than only when first seen — confirm this is intentional.
 * NOTE(review): the frame rate is not parsed from the HEVC SPS; it is fixed
 * at 25 below.
 *
 * @return NoneError always
 */
Uint32 CMkH265FileSource::ScanFile()
{
    MkString Sps;
    MkString Pps;
    MkString Vps;
    Uint32 nWidth = 0;
    Uint32 nHeight = 0;
    Uint32 nTotalOffset = 0;  // file offset of m_pBuf[0] in the current window
    while (TRUE) {
        Uint32 nToRead = m_MaxLen - m_nOffset;
        int nRead = fread(m_pBuf + m_nOffset, 1, nToRead, m_pFp);
        if (nRead <= 0) {
            break;
        }
        m_nOffset += nRead;
        const Uint8* pLastNaluEnd = nullptr;
        const Uint8* pNextNaluHeader = nullptr;
        const Uint8* pOffset = m_pBuf;
        Uint8 NaluType = 0;
        Uint32 NaluLen = 0;
        MkFilePacketParam PacketParam;
        while (NoneError == CMkMediaUtil::GetNextH26xNalu(pOffset, m_pBuf + m_nOffset, &pLastNaluEnd, &pNextNaluHeader)) {
            //first packet
            // Skip the leading start code at the very beginning of the buffer.
            if (pLastNaluEnd == m_pBuf) {
                pOffset = pNextNaluHeader;
                continue;
            }
            CMkMediaUtil::Get265NaluType(*pOffset, NaluType);
            NaluLen = pLastNaluEnd - pOffset;
            // Record this NALU's absolute file position and length.
            PacketParam.VecPacket.push_back(std::pair<Uint32, Uint32>(nTotalOffset + pOffset - m_pBuf, NaluLen));
            if (MkHevcNalSps == NaluType) {
                Sps = MkString((Int8*)pOffset, NaluLen);
                CMkMediaUtil::H265SpsParse(pOffset, NaluLen, nWidth, nHeight);
            } else if (MkHevcNalPps == NaluType) {
                Pps = MkString((Int8*)pOffset, NaluLen);
            } else if (MkHevcNalVps == NaluType) {
                Vps = MkString((Int8*)pOffset, NaluLen);
            } else if (MkHevcNalIdrWRadl == NaluType
                || MkHevcNalTrailR == NaluType
                || MkHevcNalIdrNLp == NaluType
                || MkHevcNalCraNut == NaluType
                || MkHevcNalTrailN == NaluType) {
                // A slice NALU completes the current frame.
                PacketParam.bKeyFrame = (MkHevcNalIdrWRadl == NaluType) || (MkHevcNalIdrNLp == NaluType) || (MkHevcNalCraNut == NaluType);  //MkHevcNalIdrWRadl or MkHevcNalIdrNLp is all idr
                m_VecPacket.push_back(PacketParam);
                PacketParam.VecPacket.clear();
            }
            pOffset = pNextNaluHeader;
        }
        if (nRead != nToRead) {
            // Short read means end of file: flush the final NALU as one frame.
            CMkMediaUtil::Get265NaluType(*pOffset, NaluType);
            PacketParam.bKeyFrame = (MkHevcNalIdrWRadl == NaluType) || (MkHevcNalIdrNLp == NaluType) || (MkHevcNalCraNut == NaluType);
            PacketParam.VecPacket.push_back(std::pair<Uint32, Uint32>(nTotalOffset + pOffset - m_pBuf, m_nOffset - (pOffset - m_pBuf)));
            m_VecPacket.push_back(PacketParam);
            break;
        }
        // Slide the unconsumed tail to the front of the buffer and continue.
        nTotalOffset += pOffset - m_pBuf;
        m_nOffset -= pOffset - m_pBuf;
        memmove(m_pBuf, pOffset, m_nOffset);
    }
    CMkVideoParam Param(MkCodecH265, nWidth, nHeight, 25, 0);
    Param.SetSps(Sps);
    Param.SetPps(Pps);
    Param.SetVps(Vps);
    SetVideoParam(Param);
    return NoneError;
}

/**
 * @brief Produce the next H.265 frame as an ES packet.
 *
 * Reads the NALUs of the frame at m_ReadFrameIndex back from the file
 * (using the offsets recorded by ScanFile) and appends them to one
 * MkEsPacket. At end of stream the pacing timer is removed and a final
 * empty packet flagged as key is emitted.
 *
 * @param VecEsPacket receives the produced packet
 * @return NoneError, or NotHasEnoughBuffer when a non-key frame is skipped
 *         during >= 8x fast playback
 */
Uint32 CMkH265FileSource::GetNextFrame(MkVector<MkEsPacket>& VecEsPacket)
{
    MkEsPacket EsPacket;
    EsPacket.Dts = EsPacket.Pts = m_Pts;
    EsPacket.CodecType = MkCodecH265;
    if (m_ReadFrameIndex >= m_VecPacket.size()) {
        // End of file: stop the timer and emit a final empty key packet.
        if (m_pTimer) {
            m_pTimer->DeleteTimer(m_FrameGetTimer);
        }
        EsPacket.bKeyPacket = TRUE;
    } else {
        MkFilePacketParam ReadPacketParam = m_VecPacket[m_ReadFrameIndex];
        // Advance the timestamp by one frame interval scaled by play speed.
        // BUGFIX: guard the division — an unknown (zero) frame rate
        // previously divided by zero.
        float Denominator = GetVideoParam().GetFrameRate() * GetSpeed();
        if (Denominator > 0) {
            m_Pts += static_cast<Uint32>(1000 / Denominator);
        }
        m_ReadFrameIndex++;
        // At 8x speed and above only key frames are forwarded.
        if (GetSpeed() >= 8.0 && !ReadPacketParam.bKeyFrame) {
            return NotHasEnoughBuffer;
        }
        EsPacket.bKeyPacket = ReadPacketParam.bKeyFrame;
        Uint32 nOffset = 0;
        for (size_t i = 0; i < ReadPacketParam.VecPacket.size(); i++) {
            // Each entry is a (file offset, length) pair for one NALU.
            fseek(m_pFp, ReadPacketParam.VecPacket[i].first, SEEK_SET);
            // NOTE(review): fread result unchecked; a short read would append
            // stale buffer bytes.
            fread(m_pBuf + nOffset, 1, ReadPacketParam.VecPacket[i].second, m_pFp);
            EsPacket.BufferList.Append(m_pBuf + nOffset, ReadPacketParam.VecPacket[i].second);
            nOffset += ReadPacketParam.VecPacket[i].second;
        }
    }
    VecEsPacket.emplace_back(EsPacket);
    return NoneError;
}

// AAC ADTS elementary-stream file source; all work is in the base class.
CMkAacFileSource::CMkAacFileSource(FILE *fp, CMkTimer* pTimer)
    : CMkFileSource(MkMediaSourceFileAac, fp, pTimer)
{

}

/**
 * @brief Index an AAC ADTS file into per-frame packet descriptors.
 *
 * Reads the file in m_MaxLen chunks and walks the ADTS frames with
 * GetAacAdtsFrame. The first valid ADTS header is parsed once for object
 * type, sample rate, and channel count; each frame's payload (header
 * stripped) is recorded as a (file offset, length) pair in m_VecPacket.
 * The resulting CMkAudioParam is stored via SetAudioParam().
 *
 * @return NoneError always
 */
Uint32 CMkAacFileSource::ScanFile()
{
    Uint32 BitRate = 0;
    MkAudioSampleRate SampleRate = MkSampleRate44100Hz;
    MkAudioSampleSize SampleSize = MkSampleSize16Bits;
    BOOL bStereo = FALSE;
    Uint8 Channel = 1;
    Uint32 nTotalOffset = 0;  // file offset of m_pBuf[0] in the current window
    Uint32 ErrorCode = NoneError;
    MkAacObjectType ObjType = MkAacObjectNull;
    BOOL bGetParam = FALSE;
    while (TRUE) {
        Uint32 nToRead = m_MaxLen - m_nOffset;
        int nRead = fread(m_pBuf + m_nOffset, 1, nToRead, m_pFp);
        if (nRead <= 0) {
            break;
        }
        m_nOffset += nRead;
        Uint8 *pOffset = m_pBuf;
        Uint8 *pEnd = m_pBuf + m_nOffset;
        while (pOffset + MkAacAdtsLen < pEnd) {
            const Uint8* pAdtsData = nullptr;
            Uint32 nAdtsLen = 0;
            Uint32 nUsed = 0;
            ErrorCode = CMkMediaUtil::GetAacAdtsFrame(pOffset, pEnd - pOffset, &pAdtsData, nAdtsLen, nUsed);
            if (NoneError != ErrorCode) {
                break;
            }
            if (!bGetParam) {
                CMkMediaUtil::AacAdtsParse(pAdtsData, nAdtsLen, ObjType, SampleRate, Channel);
                m_nSampleRate = CMkMediaUtil::AudioSampleRateIndexToRate(SampleRate);
                // BUGFIX: this flag was never set, so every ADTS header in the
                // file was re-parsed; parse stream parameters only once.
                bGetParam = TRUE;
            }

            // Record the frame payload, skipping the ADTS header bytes.
            MkFilePacketParam PacketParam;
            PacketParam.bKeyFrame = TRUE;  // every AAC frame is independently decodable
            PacketParam.VecPacket.push_back(std::pair<Uint32, Uint32>(nTotalOffset + pAdtsData - m_pBuf + MkAacAdtsLen, nAdtsLen - MkAacAdtsLen));
            m_VecPacket.push_back(PacketParam);
            pOffset += nUsed;
        }
        // Slide the unconsumed tail to the front of the buffer and continue.
        nTotalOffset += pOffset - m_pBuf;
        m_nOffset -= pOffset - m_pBuf;
        memmove(m_pBuf, pOffset, m_nOffset);
    }
    CMkAudioParam Param(MkCodecAac, BitRate, SampleRate, SampleSize, bStereo, Channel);
    Param.SetObjectType(ObjType);
    SetAudioParam(Param);
    return NoneError;
}

/**
 * @brief Produce the next AAC frame as an ES packet.
 *
 * Reads the frame payload at m_ReadFrameIndex back from the file using the
 * offsets recorded by ScanFile and advances the PTS by one sample-rate tick
 * (matching the timer interval chosen in StartMediaSource). At end of stream
 * the pacing timer is removed and a final empty key packet is emitted.
 *
 * @param VecEsPacket receives the produced packet
 * @return NoneError always
 */
Uint32 CMkAacFileSource::GetNextFrame(MkVector<MkEsPacket>& VecEsPacket)
{
    MkEsPacket EsPacket;
    EsPacket.Dts = EsPacket.Pts = m_Pts;
    EsPacket.CodecType = MkCodecAac;
    if (m_ReadFrameIndex >= m_VecPacket.size()) {
        // End of file: stop the timer and emit a final empty key packet.
        if (m_pTimer) {
            m_pTimer->DeleteTimer(m_FrameGetTimer);
        }
        EsPacket.bKeyPacket = TRUE;
    } else {
        MkFilePacketParam ReadPacketParam = m_VecPacket[m_ReadFrameIndex];
        // m_nSampleRate is initialized to 8000 and only replaced by a parsed
        // rate, so this division is safe.
        m_Pts += static_cast<Uint32>(1000 * 1000 / m_nSampleRate);
        m_ReadFrameIndex++;
        EsPacket.bKeyPacket = ReadPacketParam.bKeyFrame;
        Uint32 nOffset = 0;
        for (size_t i = 0; i < ReadPacketParam.VecPacket.size(); i++) {
            // Each entry is a (file offset, length) pair for one payload span.
            fseek(m_pFp, ReadPacketParam.VecPacket[i].first, SEEK_SET);
            // NOTE(review): fread result unchecked; a short read would append
            // stale buffer bytes.
            fread(m_pBuf + nOffset, 1, ReadPacketParam.VecPacket[i].second, m_pFp);
            EsPacket.BufferList.Append(m_pBuf + nOffset, ReadPacketParam.VecPacket[i].second);
            nOffset += ReadPacketParam.VecPacket[i].second;
        }
    }
    VecEsPacket.emplace_back(EsPacket);
    return NoneError;
}

/**
 * @brief Destructor; delegates cleanup to StopMediaSource().
 *
 * NOTE(review): StopMediaSource() schedules `delete this` on the timer
 * thread when m_pTimer is still set — if this destructor runs from an
 * external `delete` before StopMediaSource() has been called, that would
 * double-delete. Confirm destruction only happens via the timer-thread path
 * (where m_pTimer has already been nulled).
 */
CMkFileSource::~CMkFileSource()
{
    StopMediaSource();
}

// No H.264-specific state; base-class destructor handles cleanup.
CMkH264FileSource::~CMkH264FileSource()
{
    //MkDebugLog("delete this:%p\n", this);
}

// No H.265-specific state; base-class destructor handles cleanup.
CMkH265FileSource::~CMkH265FileSource()
{
}

// No AAC-specific state; base-class destructor handles cleanup.
CMkAacFileSource::~CMkAacFileSource()
{

}