#pragma once

#include <cstdint>
#include <memory>
#include <string>

#ifdef __cplusplus
extern "C" {
#include "libavcodec/avcodec.h"
#include <libavformat/avformat.h>
#include <libavfilter/avfilter.h>
#include <libswscale/swscale.h>
#include "libavutil/time.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
#include "libavutil/dict.h"
}
#endif

#ifdef USE_MPP
#include "mpp/MppEncoder264.h"
#endif

namespace ZYLive {
    /// Pushes raw video frames to a streaming endpoint (RTSP or other URL
    /// schemes — see mBIsRTSP) through FFmpeg's libavformat/libavcodec,
    /// optionally using a Rockchip MPP H.264 hardware encoder when USE_MPP
    /// is defined.
    ///
    /// Owns every FFmpeg resource it allocates (format context, codec
    /// context, frames, conversion buffers, sws context); all of them are
    /// released in the destructor.
    class FFMpegPusher {
    public:
        /// @param width   source frame width in pixels
        /// @param height  source frame height in pixels
        /// @param fps     target frame rate
        /// @param bitrate target bitrate — presumably kbit/s given the 2000
        ///                default; confirm against the .cpp implementation
        /// @param url     destination URL; empty by default
        FFMpegPusher(int width, int height, int fps, float bitrate = 2000, std::string url = std::string());
        ~FFMpegPusher();

        // This class owns raw FFmpeg pointers and frees them in the
        // destructor. A compiler-generated copy/move would alias those
        // pointers and cause a double free, so all four are deleted
        // (Rule of Five).
        FFMpegPusher(const FFMpegPusher&) = delete;
        FFMpegPusher& operator=(const FFMpegPusher&) = delete;
        FFMpegPusher(FFMpegPusher&&) = delete;
        FFMpegPusher& operator=(FFMpegPusher&&) = delete;

        /// Encodes one frame and sends it to the configured URL.
        /// @param pData  packed pixel data; layout selected by @p format
        /// @param format 0:YUV420P; 1:ARGB; 2:ABGR
        /// @param width  frame width of @p pData in pixels
        /// @param height frame height of @p pData in pixels
        /// @return status code as defined by the .cpp implementation
        int pushData(uint8_t* pData, int format, int width, int height);

        /// Name of the encoder chosen during init(), or a placeholder
        /// string when no codec has been selected yet.
        const char* getEncoderName() const {
            return mPCodec ? mPCodec->name : "未初始化";
        }

    private:
        /// One-time setup of the output context, codec, and stream.
        void init();

        // Internal helpers used by pushData() — bodies live in the .cpp.
        void initializeFrameBuffers(int width, int height);
        void copyYUV420Data(uint8_t* pData, int width, int height);
        void copyPlaneData(uint8_t* src, uint8_t* dst, int width, int linesize, int height);
        void setupPacketTimestamp();

        bool mBInit = false;        // true once init() has completed
        int mWidth;                 // configured output width
        int mHeight;                // configured output height
        int mFps;                   // configured output frame rate
        float mBitRate;             // configured bitrate (see ctor note)
        std::string mUrl;           // destination URL
        bool mBIsRTSP;              // whether mUrl uses the rtsp scheme

        // FFmpeg muxing/encoding state — owned, freed in the destructor.
        AVFormatContext *mPFormatContext = nullptr;
        AVStream *mPVideoStream = nullptr;
        AVCodecContext *mPCodecContext = nullptr;
        AVCodec *mPCodec = nullptr;

        // Encoder-side frame plus its pixel buffer, and the source-side
        // frame/buffer used before pixel-format conversion.
        AVFrame *mFrame = nullptr;
        uint8_t *mPBuffer = nullptr;
        AVFrame *mFrameSrc = nullptr;
        uint8_t *mPBufferSrc = nullptr;

        int64_t mCount = 0;         // frames pushed so far (timestamp base)
        AVPacket mPkt;              // reusable packet for encoded output

        SwsContext *mImgConvertCtx = nullptr;  // pixel-format conversion

#ifdef USE_MPP
        // Rockchip MPP hardware H.264 encoder and its cached SPS data.
        std::shared_ptr<MPP::MppEncoder264> mEncoder = nullptr;
        std::string mSpsInfo = std::string();
#endif
    };
}