#ifndef FFMPEGDECODE_H
#define FFMPEGDECODE_H

#include "systemHead.h"
#include "circularqueue.h"

#include <cstdlib>   // malloc/free for the plane buffers
#include <cstring>   // memcpy for deep-copying plane data

//typedef struct H264FrameDef
//{
//    unsigned int    length;
//    unsigned char*  dataBuffer;

//}H264Frame;

//typedef struct  H264YUVDef
//{
//    unsigned int    width;
//    unsigned int    height;
//    H264Frame       luma;
//    H264Frame       chromaB;
//    H264Frame       chromaR;
//    long long       pts;

//}H264YUV_Frame;



// One color plane (Y, U or V) of a decoded frame: a malloc()'d byte buffer
// plus the number of valid bytes. Owns dataBuffer and free()s it on destruction.
struct H264YUVPanel{
    unsigned int    length = 0;           // valid bytes in dataBuffer (was left uninitialized)
    unsigned char*  dataBuffer = nullptr; // owned pixel data, released with free()

    H264YUVPanel() = default;

    // Deep copy. The implicitly generated copy constructor was a shallow
    // pointer copy, so copying a panel (or an H264Frame containing panels)
    // led to a double free() when both destructors ran.
    H264YUVPanel(const H264YUVPanel &other)
        : length(other.length)
    {
        if(other.dataBuffer != nullptr && other.length > 0)
        {
            dataBuffer = static_cast<unsigned char*>(malloc(other.length));
            if(dataBuffer != nullptr)
                memcpy(dataBuffer, other.dataBuffer, other.length);
            else
                length = 0; // allocation failed: leave the panel empty
        }
    }
    // Copy-and-swap keeps assignment exception-safe and self-assignment-safe.
    H264YUVPanel &operator=(const H264YUVPanel &other)
    {
        if(this != &other)
        {
            H264YUVPanel tmp(other);
            swap(tmp);
        }
        return *this;
    }
    // Moves steal the buffer and leave the source empty.
    H264YUVPanel(H264YUVPanel &&other) noexcept
        : length(other.length), dataBuffer(other.dataBuffer)
    {
        other.length = 0;
        other.dataBuffer = nullptr;
    }
    H264YUVPanel &operator=(H264YUVPanel &&other) noexcept
    {
        if(this != &other)
        {
            if(dataBuffer != nullptr)
                free(dataBuffer);
            length = other.length;
            dataBuffer = other.dataBuffer;
            other.length = 0;
            other.dataBuffer = nullptr;
        }
        return *this;
    }
    // Exchanges contents with another panel; never throws.
    void swap(H264YUVPanel &other) noexcept
    {
        unsigned int tmpLen = length;
        length = other.length;
        other.length = tmpLen;
        unsigned char *tmpBuf = dataBuffer;
        dataBuffer = other.dataBuffer;
        other.dataBuffer = tmpBuf;
    }
    ~H264YUVPanel()
    {
        if(dataBuffer != nullptr)
        {
            free(dataBuffer);
            dataBuffer = nullptr;
        }
    }
};




// Three color planes (Y/U/V), the image dimensions and the presentation
// timestamp of one decoded frame.
struct H264Frame{
    int Width = 0;   // zero-initialized (were indeterminate until Memcpy() filled them)
    int Height = 0;
    H264YUVPanel    Y;
    H264YUVPanel    U;
    H264YUVPanel    V;
    long long       pts = 0;
    // Copies the plane data out of an FFmpeg AVFrame (implemented in the .cpp).
    void Memcpy(AVFrame *temp);
private:
    // Copies one plane, honoring the AVFrame line stride (linesize).
    // NOTE(review): exact src/dst roles are defined in the .cpp — confirm there.
    void MemcpyPanel(int minWidth, int height, int linesize, AVFrame *temp, uint8_t *src, uint8_t *dst);
};

// Render callback: receives ownership of a fully decoded YUV frame plus the
// target screen/window id. A plain function pointer (not a Qt signal) is used
// because it is invoked at per-frame rate (see FFmpegDecode::SetCallBackRender).
using UpdateOpenGLDate = void (*)(std::unique_ptr<H264Frame>, int64_t);


// RAII owner of an FFmpeg AVFormatContext (the demuxer handle).
// NOTE(review): the struct is copyable; copying it would double-close the
// context. Consider deleting the copy operations — confirm no caller copies.
struct MyAVFormatContext{
    MyAVFormatContext()
    {

    }
    // Closes the input and releases the context; safe to call more than once.
    void Destroy()
    {
        if(_avFormatContext != nullptr)
        {
            avformat_close_input(&_avFormatContext);
            _avFormatContext = nullptr;
        }
    }
    virtual ~MyAVFormatContext()
    {
        Destroy();
    }
    AVFormatContext *_avFormatContext = nullptr;
};

struct AVCodecContextDeleter{
    void operator()(AVCodecContext *pCodecCtx)
    {
        if(pCodecCtx != nullptr)
        {
            avcodec_close(pCodecCtx);
//            avcodec_free_context(&pCodecCtx);
            pCodecCtx = nullptr;
        }
    }
};

// unique_ptr deleter that releases an AVFrame via av_frame_free().
struct AVFrameDeleter{
    void operator()(AVFrame *frame)
    {
        if(frame == nullptr)
            return;
        av_frame_free(&frame); // also nulls the (local, by-value) pointer
    }
};

// Holds one AVPacket and guarantees av_packet_unref() on destruction.
struct AVPacketWrap{
    // Value-initialized with {}: the previous plain declaration left the POD
    // members (including .data) indeterminate, so Destroy() read an
    // uninitialized pointer (undefined behavior) whenever the wrapper was
    // destroyed before av_read_frame() ever filled the packet.
    AVPacket tempPacket{};
    // Drops the packet's payload reference; safe to call repeatedly.
    void Destroy()
    {
        if(tempPacket.data != nullptr)
        {
            av_packet_unref(&tempPacket);
        }
    }
    ~AVPacketWrap()
    {
        Destroy();
    }
};

// RAII wrapper around an FFmpeg SwsContext (pixel-format / scaling converter).
struct SwsContextWrap{
    SwsContext *TempContext = nullptr;
    // Releases the converter; safe to call more than once.
    void Destroy()
    {
        if(TempContext == nullptr)
            return;
        sws_freeContext(TempContext);
        TempContext = nullptr;
    }
    ~SwsContextWrap()
    {
        Destroy();
    }
};

// RAII wrapper around an FFmpeg SwrContext (audio resampler).
struct SwrContextWrap{
    SwrContext *TempContext = nullptr;
    // Releases the resampler; safe to call more than once.
    void Destroy()
    {
        if(TempContext == nullptr)
            return;
        // swr_free() sets the pointer it receives to NULL itself.
        swr_free(&TempContext);
    }
    ~SwrContextWrap()
    {
        Destroy();
    }
};

// RAII owner of a buffer released via av_free() (presumably allocated with
// av_malloc() by SetYUVBuffer — allocation site lives in the .cpp).
struct YUVBufferWrap{
    uint8_t *data = nullptr;
    // Frees the buffer; safe to call more than once.
    void Destroy()
    {
        if(data == nullptr)
            return;
        av_free(data);
        data = nullptr;
    }
    ~YUVBufferWrap()
    {
        Destroy();
    }
};

// Owns a plain malloc()/free() byte buffer (used for resampled audio output)
// together with its length in bytes.
struct SwrBufferWrap{
    uint8_t         *data = nullptr;
    int64_t         length = 0;
    // Frees the buffer and resets the length; safe to call more than once.
    void Destroy()
    {
        if(data == nullptr)
            return;
        free(data);
        data = nullptr;
        length = 0;
    }
    ~SwrBufferWrap()
    {
        Destroy();
    }
};


// Video decoder: owns the codec context, pulls packets from a CircularQueue,
// decodes and converts them to YUV, and hands frames to the render callback.
// (The misspelled name "VedioDecode" is kept for source compatibility.)
class VedioDecode
{
public:
    VedioDecode();
    virtual ~VedioDecode();
    void SetAVCodecContext(AVCodec *avCode, AVCodecParameters *decodeVedioContex);
    void SetYUVBuffer();

    // Set the media stream index.
    void SetId(int i)
    {
        _id = i;
    }
    int GetId() const
    {
        return _id;
    }
    AVCodecContext *GetAVCodecContext() const
    {
        return _pCodecCtx.get();
    }
    void SetTimeBase(AVRational *avr)
    {
        _avr = avr;
    }
    const AVRational * GetTimeBase() const
    {
        return _avr;
    }
    void SetDuration(double time)
    {
        _duration = time;
    }
    void startDecode(CircularQueue &queue);
    // Registers the render callback and the target screen id.
    void SetCallBackRender(UpdateOpenGLDate func, int64_t screen)
    {
        _Render = func;
        _screen = screen;

    }
    double GetPts(AVFrame *); // converts from the stream time base to milliseconds
    // Frees the internal buffers without destroying the object itself.
    virtual void Destroy()
    {
        _pCodecCtx.reset();
        _yuvFrame.reset();
        _yuvBuffer.Destroy();
        _swsContextWrap.Destroy();
    }
//    void SetAdjust()
//    {
//        _adjust = true;
//    }
//    void CheckPlayStatus();
protected:
    virtual void convertAndRender(AVFrame *temp);
    void checkQueueEmpty(const CircularQueue &queue) const;
    virtual bool Synchronization(AVFrame *);
private:

protected:
    std::unique_ptr<AVCodecContext, AVCodecContextDeleter>  _pCodecCtx;
    int                                                     _id = -1;
    AVCodec                                                 *_codec = nullptr;
    // Owned by the AVFormatContext; read-only here, never modified.
    // Initialized to nullptr (it was previously left indeterminate, so
    // GetTimeBase() returned garbage before SetTimeBase() ran).
    AVRational *                                            _avr = nullptr;
    double                                                  _duration = 0;
    /*bool                                                    _adjust = false;*/// flag marking that an adjustment is in progress
private:
    YUVBufferWrap                                           _yuvBuffer;
    std::unique_ptr<AVFrame, AVFrameDeleter>                _yuvFrame;
    UpdateOpenGLDate                                        _Render = nullptr; // was uninitialized
    SwsContextWrap                                          _swsContextWrap;
    int64_t                                                 _screen = 0;       // was uninitialized
//    std::unique_ptr<AVFrame, AVFrameDeleter>                _pAVFrame;
};

// Audio branch of the decoder: reuses the VedioDecode machinery but overrides
// conversion/rendering and A/V synchronization (implementations in the .cpp,
// presumably resampling through _swrContext — confirm there).
class AudioDecode
        :public VedioDecode
{
public:
    // Releases audio-specific resources in addition to the base-class ones.
    void Destroy() override;
protected:
    void convertAndRender(AVFrame *temp) override;
    bool Synchronization(AVFrame *) override;
//    void checkQueueEmpty(const CircularQueue &queue) const override;
private:
    SwrContextWrap                  _swrContext;
};

// Top-level demux/decode driver: opens the media file, runs a packet-reading
// thread plus one decode thread each for video and audio, and keeps the shared
// timing state used for A/V synchronization.
class FFmpegDecode
{
public:
    FFmpegDecode()
    {

    }
    FFmpegDecode(const QString &filePah);
    void SetFilePath(const QString &filePah);
    void init();
    virtual ~FFmpegDecode();
    void startReadFrame();
    void startDecodeVedio();
    // A raw callback is used instead of Qt signals/slots because the call
    // frequency (once per frame) is too high for the slot mechanism.
    void SetCallBackRender(UpdateOpenGLDate func, int64_t screen);
    void SetProgress(double); // argument is the fraction of the total duration
    // Total duration in milliseconds (the longer of the two streams).
    double GetDuration() const
    {
        return std::max(_realVedio, _realAudio)*1000;
    }
    double GetVedioDuration() const
    {
        return _realVedio;
    }
    double GetAudioDDuration() const
    {
        return _realAudio;
    }
    static double                                          vedioMSec; // current video pts, in ms
    static double                                          audioMSec; // current audio pts, in ms
    static int                                             audioPtsDelayoffset;
    static int                                             vedioPtsDelayoffset;
    constexpr static int                                   SynchronizationPrecision = 30; // A/V sync tolerance, in ms
    static bool                                            threadExit;
    void reset();
    void threadJoin();
private:
    QString                                                 _filePath;
    MyAVFormatContext                                       _myAvFormatContext;
    VedioDecode                                             _vd;
    AudioDecode                                             _ad;
    CircularQueue                                           _shareDataVedio, _shareDataAudio;
    std::thread                                             _vedioThread; // plain std::thread (was the equivalent decltype(std::thread()))
    std::thread                                             _audioThread;
    std::thread                                             _readThread;
    // Total stream durations in seconds. Zero-initialized: they were previously
    // indeterminate, so GetDuration() returned garbage before init() ran.
    double                                                  _realVedio = 0, _realAudio = 0;
    // True for the first packet read after a seek: that packet is also the last
    // packet from before the seek and must be discarded, otherwise the streams
    // desynchronize.
    bool                                                    _afterSeekFirstPacket = false;
};
#endif // FFMPEGDECODE_H
