#include <thread>
#include "FFMpegPusher.h"
#include "ZYUtils/ZYLog.h"
#include "libyuv.h"
#include "ZYUtils/ZYTimer.h"

static const char* av_err2str_fix(int errnum) {
    static char str[AV_ERROR_MAX_STRING_SIZE];
    memset(str, 0, sizeof(str));
    return av_make_error_string(str, AV_ERROR_MAX_STRING_SIZE, errnum);
}

namespace ZYLive {
    /// Construct a pusher for a fixed output size/framerate and target URL.
    /// @param width/height  encoder output dimensions
    /// @param fps           output frame rate (also the encoder time base)
    /// @param bitrate       stored but currently unused (CRF mode is used; see init())
    /// @param url           destination URL; "rtsp..." selects the RTSP muxer, anything else FLV
    FFMpegPusher::FFMpegPusher(int width, int height, int fps, float bitrate, std::string url)
    : mWidth(width)
    , mHeight(height)
    , mFps(fps)
    , mBitRate(bitrate)
    , mUrl(std::move(url))
    {
        // avcodec_configuration() returns a static string; log it directly
        // instead of copying it through a stack buffer as before.
        ZYUtils::ZY_LOG("lucas, avcodec_configuration:%s,%s", avcodec_configuration(), av_version_info());

        // Detect the protocol from the URL *prefix*. The previous find()-anywhere
        // check misclassified URLs that merely contain "rtsp" in the path
        // (e.g. "rtmp://host/live/rtsp_cam"). rfind(.., 0) == 0 is a prefix test
        // and still matches both "rtsp://" and "rtsps://".
        mBIsRTSP = (mUrl.rfind("rtsp", 0) == 0);
    }

    /// Tear down all FFmpeg resources acquired by init()/initializeFrameBuffers().
    /// NOTE(review): the encoder context is freed *before* av_write_trailer()
    /// runs below, so any frames still buffered inside the encoder are never
    /// flushed into the stream. With max_b_frames=0 and zerolatency tuning this
    /// is usually a non-issue — confirm if those encoder settings ever change.
    FFMpegPusher::~FFMpegPusher() {
        // Free the (unused — libyuv does the conversion) swscale context.
        if (mImgConvertCtx) {
            sws_freeContext(mImgConvertCtx);
            mImgConvertCtx = nullptr;
        }

        // av_frame_free()/avcodec_free_context() already null the pointer they
        // are given; the explicit nulling below is redundant but harmless.
        if (mFrame != nullptr) {
            av_frame_free(&mFrame);
            mFrame = nullptr;
        }

        if (mFrameSrc != nullptr) {
            av_frame_free(&mFrameSrc);
            mFrameSrc = nullptr;
        }

        if (mPCodecContext != nullptr) {
            avcodec_free_context(&mPCodecContext);
            mPCodecContext = nullptr;
        }

        // Frame pixel buffers are owned separately from the AVFrame structs.
        if (mPBuffer != nullptr) {
            av_free(mPBuffer);
            mPBuffer = nullptr;
        }

        if (mPBufferSrc != nullptr) {
            av_free(mPBufferSrc);
            mPBufferSrc = nullptr;
        }

        // Finalize and close the output last. NOTE(review): av_write_trailer()
        // is only valid if avformat_write_header() succeeded earlier; init()
        // logs but does not abort on a header failure — verify that pb being
        // non-null is a sufficient guard here.
        if (mPFormatContext) {
            if (mPFormatContext->pb) {
                av_write_trailer(mPFormatContext);
                avio_close(mPFormatContext->pb);
            }
            avformat_free_context(mPFormatContext);
            mPFormatContext = nullptr;
        }
    }

    /// One-time lazy initialization (called from pushData() on first frame):
    /// allocates the output muxer, opens an H.264 encoder tuned for lowest
    /// latency (CRF, zero lookahead, no B-frames), opens the network output,
    /// and writes the container header.
    void FFMpegPusher::init() {
        // Pick the container by protocol: "rtsp" muxer for RTSP URLs,
        // otherwise FLV (the container RTMP expects).
        std::string format = "rtsp";
        if (mBIsRTSP == false)
            format = "flv";

        int ret = avformat_alloc_output_context2(&mPFormatContext, NULL, format.c_str(), mUrl.c_str());
        if (ret < 0) {
            // NOTE(review): ZY_LOG_ASSERT presumably aborts; if it only logs,
            // execution continues with a null format context — confirm.
            ZYUtils::ZY_LOG_ASSERT("avformat_alloc_output_context2 error: %s", av_err2str_fix(ret));
        }

        // Prefer libx264; fall back to FFmpeg's default H.264 encoder.
        mPCodec = avcodec_find_encoder_by_name("libx264");
        if (mPCodec == nullptr) {
            ZYUtils::ZY_LOG("尝试查找libx264编码器失败，尝试查找默认H264编码器");
            mPCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
            if (mPCodec == nullptr) {
                ZYUtils::ZY_LOG_ASSERT("无法找到任何H264编码器!");
            }
        }

        mPCodecContext = avcodec_alloc_context3(mPCodec);

        // Single-threaded encoding: trades throughput for minimal latency.
        mPCodecContext->thread_count = 1; 
        mPCodecContext->thread_type = 0;  

        mPCodecContext->codec_id = mPCodec->id;
        mPCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
        mPCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
        mPCodecContext->width = mWidth;
        mPCodecContext->height = mHeight;
        mPCodecContext->framerate = (AVRational){mFps, 1};
        mPCodecContext->time_base = (AVRational){1, mFps};
        
        // Do not set bit_rate — let CRF mode control the rate entirely
        // (mBitRate is therefore currently unused).
        // mPCodecContext->bit_rate = mBitRate;  
        
        mPCodecContext->gop_size = 10; // reduced from 15 to 10
        mPCodecContext->keyint_min = 5;
        mPCodecContext->max_b_frames = 0; // no B-frames: keeps dts == pts, no reordering delay

        // Muxers like FLV need SPS/PPS in extradata rather than in-band.
        if (mPFormatContext->oformat->flags & AVFMT_GLOBALHEADER) {
            mPCodecContext->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
        }

        // Disable rate-control buffering constraints (CRF only).
        mPCodecContext->rc_buffer_size = 0;
        mPCodecContext->rc_max_rate = 0;
        mPCodecContext->rc_min_rate = 0;

        // libx264-specific private options, all chosen for speed/latency over quality.
        AVDictionary *param = nullptr;
        if (strcmp(mPCodec->name, "libx264") == 0) {
            av_dict_set(&param, "preset", "ultrafast", 0);      // fastest preset
            av_dict_set(&param, "tune", "zerolatency", 0);      // zero-latency tuning
            av_dict_set(&param, "profile", "baseline", 0);      // baseline profile
            
            // Constant-quality mode (lower CRF = better quality, larger output).
            av_dict_set(&param, "crf", "20", 0);              
            
            // Fixed GOP: keyframe every 10 frames, no scene-cut keyframes.
            av_dict_set(&param, "keyint", "10", 0);           
            av_dict_set(&param, "min-keyint", "5", 0);         
            av_dict_set(&param, "scenecut", "0", 0);           
            
            // Disable every expensive analysis/compression feature.
            av_dict_set(&param, "bframes", "0", 0);            
            av_dict_set(&param, "ref", "1", 0);               
            av_dict_set(&param, "me", "dia", 0);              
            av_dict_set(&param, "subme", "0", 0);             
            av_dict_set(&param, "trellis", "0", 0);            
            av_dict_set(&param, "aq-mode", "0", 0);            
            av_dict_set(&param, "weightp", "0", 0);            
            av_dict_set(&param, "8x8dct", "0", 0);            
            av_dict_set(&param, "cabac", "0", 0);              
            av_dict_set(&param, "deblock", "0:0:0", 0);        
            av_dict_set(&param, "fast-pskip", "1", 0);         
            av_dict_set(&param, "mixed-refs", "0", 0);         
            
            // No lookahead of any kind — frames go straight through.
            av_dict_set(&param, "sliced-threads", "0", 0);     
            av_dict_set(&param, "rc-lookahead", "0", 0);       
            av_dict_set(&param, "sync-lookahead", "0", 0);    
            
            // Disable HRD signalling (not needed without a bitrate cap).
            av_dict_set(&param, "x264-params", "nal-hrd=none", 0);
        }

        ret = avcodec_open2(mPCodecContext, mPCodec, &param);
        if (ret < 0) {
            ZYUtils::ZY_LOG_ASSERT("Failed to open encoder: %s", av_err2str_fix(ret));
        }
        
        av_dict_free(&param);

        mPVideoStream = avformat_new_stream(mPFormatContext, mPCodec);
        if (mPVideoStream == nullptr) {
            ZYUtils::ZY_LOG_ASSERT("avformat_new_stream error!");
        }

        // NOTE(review): some muxers overwrite the stream time_base in
        // avformat_write_header(); setupPacketTimestamp() rescales against
        // mPVideoStream->time_base, so this is handled either way.
        mPVideoStream->time_base = (AVRational){1, mFps};
        mPVideoStream->codecpar->codec_tag = 0;
        avcodec_parameters_from_context(mPVideoStream->codecpar, mPCodecContext);

        // Open the byte-stream output ourselves unless the muxer owns the
        // transport (the RTSP muxer sets AVFMT_NOFILE and connects itself).
        if (!(mPFormatContext->oformat->flags & AVFMT_NOFILE)) {
            AVDictionary *avio_opts = nullptr;
            if (mBIsRTSP) {
                av_dict_set(&avio_opts, "rtsp_transport", "udp", 0);       
                av_dict_set(&avio_opts, "rtsp_flags", "prefer_tcp", 0);    
                av_dict_set(&avio_opts, "stimeout", "1000000", 0);       // socket timeout, microseconds
                av_dict_set(&avio_opts, "max_delay", "0", 0);             
                av_dict_set(&avio_opts, "fflags", "nobuffer", 0);         
                av_dict_set(&avio_opts, "buffer_size", "8192", 0);       
                
                // UDP-level options: packet size under typical MTU, port reuse, TTL.
                av_dict_set(&avio_opts, "pkt_size", "1316", 0);          
                av_dict_set(&avio_opts, "reuse", "1", 0);                
                av_dict_set(&avio_opts, "ttl", "64", 0);                 
                
            } else {
                // FLV/RTMP path.
                av_dict_set(&avio_opts, "flv_metadata", "1", 0);
                av_dict_set(&avio_opts, "max_delay", "0", 0);
            }
            
            ret = avio_open2(&mPFormatContext->pb, mUrl.c_str(), AVIO_FLAG_WRITE, nullptr, &avio_opts);
            av_dict_free(&avio_opts);
            
            if (ret < 0) {
                // NOTE(review): failure is only logged; avformat_write_header()
                // below will then run with a null pb — confirm intended.
                ZYUtils::ZY_LOG("Could not open output URL '%s': %s", mUrl.c_str(), av_err2str_fix(ret));
            }
        }

        // Muxer-level options applied at header-write time.
        AVDictionary *fmt_opts = nullptr;
        if (mBIsRTSP) {
            av_dict_set(&fmt_opts, "fflags", "nobuffer+flush_packets", 0); 
            av_dict_set(&fmt_opts, "max_delay", "0", 0);                   
            av_dict_set(&fmt_opts, "packetsize", "1316", 0);               
            // NOTE(review): rtsp_listen=1 makes the RTSP muxer *listen* as a
            // server instead of pushing to one — verify this is intentional.
            av_dict_set(&fmt_opts, "rtsp_listen", "1", 0);                 
        }
        
        ret = avformat_write_header(mPFormatContext, &fmt_opts);
        if (ret < 0) {
            ZYUtils::ZY_LOG("avformat_write_header failed: %s", av_err2str_fix(ret));
        }
        av_dict_free(&fmt_opts);
    
        // NOTE(review): av_init_packet() is deprecated in recent FFmpeg;
        // mPkt appears to be a stack/member AVPacket rather than av_packet_alloc'd.
        av_init_packet(&mPkt);

        // Allocated but unused: pushData() converts/scales with libyuv instead.
        mImgConvertCtx = sws_getContext(mWidth, mHeight, AV_PIX_FMT_RGBA,
                                        mWidth, mHeight, mPCodecContext->pix_fmt,
                                        SWS_FAST_BILINEAR, 
                                        nullptr, nullptr, nullptr);

        ZYUtils::ZY_LOG("FFMpeg推流器初始化完成，编码器: %s", mPCodec->name);
        mBInit = true;
    }

    void FFMpegPusher::initializeFrameBuffers(int width, int height) {
        if (mPBuffer == nullptr) {
            mFrame = av_frame_alloc();
            int picSize = av_image_get_buffer_size(mPCodecContext->pix_fmt, mPCodecContext->width,
                                                   mPCodecContext->height, 1);
            mPBuffer = (uint8_t*)av_malloc(picSize);
            av_image_fill_arrays(mFrame->data, mFrame->linesize, mPBuffer, mPCodecContext->pix_fmt,
                                 mPCodecContext->width, mPCodecContext->height, 1);

            mFrame->format = AV_PIX_FMT_YUV420P;
            mFrame->width = mWidth;
            mFrame->height = mHeight;
        }

        if (mPBufferSrc == nullptr) {
            mFrameSrc = av_frame_alloc();
            int picSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 1);
            mPBufferSrc = (uint8_t*)av_malloc(picSize);
            av_image_fill_arrays(mFrameSrc->data, mFrameSrc->linesize, mPBufferSrc, AV_PIX_FMT_YUV420P,
                                 width, height, 1);

            mFrameSrc->format = AV_PIX_FMT_YUV420P;
            mFrameSrc->width = width;
            mFrameSrc->height = height;
        }
    }

    /// Copy a tightly packed planar YUV420 buffer (Y plane, then U, then V)
    /// into mFrameSrc, honoring each destination plane's linesize. When a
    /// plane's stride equals its width the whole plane is copied in one
    /// memcpy; otherwise it falls back to a row-by-row copy.
    void FFMpegPusher::copyYUV420Data(uint8_t* pData, int width, int height) {
        const int lumaBytes = width * height;
        const int chromaBytes = lumaBytes / 4;
        const int chromaWidth = width >> 1;
        const int chromaHeight = height >> 1;

        // Plane layout in the packed input: [Y | U | V].
        uint8_t* srcPlane[3] = {
            pData,
            pData + lumaBytes,
            pData + lumaBytes + chromaBytes
        };
        const int planeWidth[3]  = { width, chromaWidth, chromaWidth };
        const int planeHeight[3] = { height, chromaHeight, chromaHeight };
        const int planeBytes[3]  = { lumaBytes, chromaBytes, chromaBytes };

        for (int p = 0; p < 3; ++p) {
            if (planeWidth[p] == mFrameSrc->linesize[p]) {
                memcpy(mFrameSrc->data[p], srcPlane[p], planeBytes[p]);
            } else {
                copyPlaneData(srcPlane[p], mFrameSrc->data[p],
                              planeWidth[p], mFrameSrc->linesize[p], planeHeight[p]);
            }
        }
    }

    /// Row-by-row copy of one image plane from a tightly packed source
    /// (stride == width) into a destination with stride `linesize`.
    void FFMpegPusher::copyPlaneData(uint8_t* src, uint8_t* dst, int width, int linesize, int height) {
        for (int row = 0; row < height; ++row) {
            memcpy(dst + (size_t)row * (size_t)linesize,
                   src + (size_t)row * (size_t)width,
                   (size_t)width);
        }
    }

    void FFMpegPusher::setupPacketTimestamp() {
        mPkt.stream_index = mPVideoStream->index;
        

        AVRational encoder_tb = mPCodecContext->time_base;
        AVRational stream_tb = mPVideoStream->time_base;
        
        mPkt.pts = av_rescale_q_rnd(mCount, encoder_tb, stream_tb, 
                                    (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        mPkt.dts = mPkt.pts; 
        mPkt.duration = av_rescale_q(1, encoder_tb, stream_tb);
    }

    int FFMpegPusher::pushData(uint8_t* pData, int format, int width, int height) {
        ZYTime time1;
        
        if (mBInit == false) {
            init();
        }
        
        if (pData == nullptr) {
            ZYUtils::ZY_LOG("pData is nullptr");
            return -1;
        }
        
        if (format > 2) {
            ZYUtils::ZY_LOG("format error!");
            return -1;
        }

        auto diff1 = time1.GetTimeOfDuration();

        initializeFrameBuffers(width, height);

        mFrame->pts = mCount;


        int conversion_result = 0;
        if (format == 1) { // ARGB
            conversion_result = libyuv::ARGBToI420(
                pData, width * 4,
                mFrameSrc->data[0], mFrameSrc->linesize[0],
                mFrameSrc->data[1], mFrameSrc->linesize[1],
                mFrameSrc->data[2], mFrameSrc->linesize[2],
                width, height
            );
        } else if (format == 2) { // ABGR
            conversion_result = libyuv::ABGRToI420(
                pData, width * 4,
                mFrameSrc->data[0], mFrameSrc->linesize[0],
                mFrameSrc->data[1], mFrameSrc->linesize[1],
                mFrameSrc->data[2], mFrameSrc->linesize[2],
                width, height
            );
        } else { // YUV420P
            copyYUV420Data(pData, width, height);
        }

        if (conversion_result != 0) {
            ZYUtils::ZY_LOG("颜色转换失败，错误码: %d", conversion_result);
            return -1;
        }

     
        if (width != mWidth || height != mHeight) {
            int scale_result = libyuv::I420Scale(
                (const uint8_t*)mFrameSrc->data[0], mFrameSrc->linesize[0],
                (const uint8_t*)mFrameSrc->data[1], mFrameSrc->linesize[1],
                (const uint8_t*)mFrameSrc->data[2], mFrameSrc->linesize[2],
                width, height,
                (uint8_t*)mFrame->data[0], mFrame->linesize[0],
                (uint8_t*)mFrame->data[1], mFrame->linesize[1],
                (uint8_t*)mFrame->data[2], mFrame->linesize[2],
                mWidth, mHeight,
                libyuv::FilterMode::kFilterBilinear
            );
            
            if (scale_result != 0) {
                ZYUtils::ZY_LOG("缩放失败: %d", scale_result);
                return -1;
            }
        } else {
      
            av_frame_copy(mFrame, mFrameSrc);
        }

        auto diff2 = time1.GetTimeOfDuration();

    
        mPkt.data = nullptr;
        mPkt.size = 0;

        int ret = avcodec_send_frame(mPCodecContext, mFrame);
        if (ret != 0) {
            ZYUtils::ZY_LOG("avcodec_send_frame error: %s", av_err2str_fix(ret));
            return -1;
        }

        auto diff3 = time1.GetTimeOfDuration();
        
        ret = avcodec_receive_packet(mPCodecContext, &mPkt);
        if (ret != 0 || mPkt.size <= 0) {
            if (ret == AVERROR(EAGAIN)) {
         
                return 0;
            }
            ZYUtils::ZY_LOG("avcodec_receive_packet error: %s", av_err2str_fix(ret));
            return -1;
        }

        auto diff4 = time1.GetTimeOfDuration();

     
        setupPacketTimestamp();

   
        int write_ret = av_interleaved_write_frame(mPFormatContext, &mPkt);
        if (write_ret != 0) {
            ZYUtils::ZY_LOG("av_interleaved_write_frame failed: %s", av_err2str_fix(write_ret));
            return -1;
        }


        if (mPFormatContext->pb) {
            avio_flush(mPFormatContext->pb);
        }

        mCount++;
        auto diff5 = time1.GetTimeOfDuration();
        
        // 性能监控（每30帧输出一次）
        // static int frame_counter = 0;
        // if (++frame_counter % 30 == 0) {
        //     ZYUtils::ZY_LOG("性能统计[帧%lld]: 预处理=%.2fms, 转换=%.2fms, 编码=%.2fms, 打包=%.2fms, 发送=%.2fms", 
        //                    mCount, diff1, diff2-diff1, diff3-diff2, diff4-diff3, diff5-diff4);
        // }
        
        return 0;
    }
}