//
// Created by mayunxi on 2020/6/11.
//

#include "Decoder4GPU.h"

int Decoder4GPU::hw_decoder_init(AVCodecContext *ctx, const enum AVHWDeviceType type)
{
    // Create the hardware device context (AVHWDeviceContext) for `type` and
    // initialize the device. Device ordinal "0" selects the first adapter.
    // Returns 0 on success, a negative AVERROR code on failure.
    int err = av_hwdevice_ctx_create(&m_hw_device_ref, type, to_string(0).c_str(), NULL, 0);
    if (err < 0)
    {
        //LOG4CPLUS_ERROR(g_logger,"Failed to create specified HW device.");
        return err;
    }
    // The codec context takes its own reference on the device context.
    ctx->hw_device_ctx = av_buffer_ref(m_hw_device_ref);
    if (!ctx->hw_device_ctx)
        err = AVERROR(ENOMEM);  // fix: a failed av_buffer_ref was silently ignored

    return err;
}



int Decoder4GPU::decode_write(AVCodecContext *avctx, AVPacket *packet)
{
    // Send one packet to the decoder, receive at most one frame, download it
    // from GPU memory if needed, convert to YUV420P and hand it to Display().
    // Returns 0 on success, a negative AVERROR code otherwise.
    AVFrame *tmp_frame = NULL;
    int ret = 0;
    // Reject packets with no payload.
    if (packet->buf == NULL) {
        cout << "packet.buf:" << packet->buf << endl;
        //LOG4CPLUS_ERROR(g_logger, "packet.buf:" << packet->buf);
        return -1;
    }
    ret = avcodec_send_packet(avctx, packet);
    if (ret < 0) {
        m_msg = string("during decoding,") + g_ffmpeg_err;
        LOG4CPLUS_WARN(g_logger,m_msg);
        return ret;
    }

    while (1) {
        AVFrame *sw_frame = NULL;
        ret = avcodec_receive_frame(avctx, m_avframe);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            return ret;  // decoder needs more input / end of stream
        } else if (ret < 0) {
            LOG4CPLUS_WARN(g_logger, m_rtsp_info.ip + " while decoding");
            return ret;
        }

        if (m_avframe->format == m_hw_pix_fmt)
        {
            // Frame lives in GPU memory: copy it back into a software frame.
            if(!(sw_frame = av_frame_alloc()))
            {
                LOG4CPLUS_ERROR(g_logger, m_rtsp_info.ip + " Can not alloc avframe,"+g_ffmpeg_err);
                return AVERROR(ENOMEM);  // fix: was `return false` (== 0 == success)
            }
            /* retrieve data from GPU to CPU */
            if ((ret = av_hwframe_transfer_data(sw_frame, m_avframe, 0)) < 0)
            {
                LOG4CPLUS_ERROR(g_logger, m_rtsp_info.ip + " Error transferring the data to system memory");
                av_frame_free(&sw_frame);  // fix: sw_frame leaked on this path
                return ret;
            }
            tmp_frame = sw_frame;
        }
        else
        {
            tmp_frame = m_avframe;  // already a software frame (e.g. yuvj420p)
        }

        // Convert tmp_frame to YUV420P. The buffer is sized with the same
        // dimensions the fill/scale below write (fix: out_size previously used
        // tmp_frame dims while the fill used m_video_* — a mismatch could
        // overflow the buffer).
        int out_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, m_video_width, m_video_height, 1);
        m_out_buffer = (uint8_t*)malloc(out_size);
        m_pFrameYUV = av_frame_alloc();
        if (m_out_buffer == NULL || m_pFrameYUV == NULL) {  // fix: allocations were unchecked
            free(m_out_buffer);
            m_out_buffer = NULL;
            av_frame_free(&m_pFrameYUV);
            av_frame_free(&sw_frame);
            return AVERROR(ENOMEM);
        }
        av_image_fill_arrays(m_pFrameYUV->data, m_pFrameYUV->linesize, m_out_buffer,
            AV_PIX_FMT_YUV420P, m_video_width, m_video_height, 1);
        m_img_convert_ctx = sws_getContext(m_video_width, m_video_height, (enum AVPixelFormat)tmp_frame->format,
            m_video_width, m_video_height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
        int scaled = 0;  // fix: renamed — the old inner `ret` shadowed the outer one
        if (m_img_convert_ctx != NULL)  // fix: sws_getContext can fail for odd formats
            scaled = sws_scale(m_img_convert_ctx, (const unsigned char* const*)tmp_frame->data,
                tmp_frame->linesize, 0, tmp_frame->height, m_pFrameYUV->data, m_pFrameYUV->linesize);
        if (scaled)
            Display(m_pFrameYUV);

        sws_freeContext(m_img_convert_ctx);
        m_img_convert_ctx = NULL;
        free(m_out_buffer);
        m_out_buffer = NULL;  // don't leave a member pointing at freed memory
        av_frame_free(&m_pFrameYUV);
        av_frame_free(&sw_frame);  // safe no-op when the frame was not HW-backed

        return 0;  // one frame handled per call (original behaviour preserved)
    }
}

AVPixelFormat Decoder4GPU::GetHwFormat(AVCodecContext* ctx, const AVPixelFormat* pix_fmts)
{
    // get_format callback installed on the decoder context. Scans the
    // candidate list for the QSV surface format and lazily creates the QSV
    // frame pool once the coded dimensions are known.
    const enum AVPixelFormat* p;
    Decoder4GPU* pThis = (Decoder4GPU*)ctx->opaque;
    // fix: compare against the named sentinel, not the raw value -1
    for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p == pThis->m_hw_pix_fmt && *p == AV_PIX_FMT_QSV) {
            if (pThis->HwQsvDecoderInit(ctx) < 0)
                return AV_PIX_FMT_NONE;
            return *p;
        }
    }

    fprintf(stderr, "Failed to get HW surface format.\n");
    return AV_PIX_FMT_NONE;
}

int Decoder4GPU::HwQsvDecoderInit(AVCodecContext* ctx)
{
    // Create and initialize the QSV surface pool (AVHWFramesContext) the
    // decoder will render into. Returns 0 on success, negative AVERROR on error.
    Decoder4GPU* pThis = (Decoder4GPU*)ctx->opaque;
    AVHWFramesContext* frames_ctx;
    AVQSVFramesContext* frames_hwctx;

    // fix: drop any pool from a previous call so re-initialization (get_format
    // can fire more than once, and init() used to call this again after
    // avcodec_open2) does not leak the old AVHWFramesContext.
    av_buffer_unref(&ctx->hw_frames_ctx);

    /* create a pool of surfaces to be used by the decoder */
    ctx->hw_frames_ctx = av_hwframe_ctx_alloc(pThis->m_hw_device_ref);
    if (!ctx->hw_frames_ctx)
        return AVERROR(ENOMEM);  // fix: report a real error code, not a bare -1
    frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
    frames_hwctx = (AVQSVFramesContext*)frames_ctx->hwctx;

    frames_ctx->format = AV_PIX_FMT_QSV;
    frames_ctx->sw_format = ctx->sw_pix_fmt;
    // QSV requires surface dimensions aligned to 32.
    frames_ctx->width = FFALIGN(ctx->coded_width, 32);
    frames_ctx->height = FFALIGN(ctx->coded_height, 32);
    frames_ctx->initial_pool_size = 16;

    frames_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

    return av_hwframe_ctx_init(ctx->hw_frames_ctx);
}

AVCodecContext* Decoder4GPU::GetQsvDecoder(AVStream* stream)
{
    // Build and open an Intel Quick Sync (QSV) decoder context for `stream`.
    // Returns NULL on failure with all partially created resources released.
    // fix: every variable is declared before the first `goto failed` — the old
    // code jumped over the initialization of `find_decoder`, which is
    // ill-formed C++ (MSVC error C2362).
    AVCodecContext* decoder_ctx = nullptr;
    AVCodec* find_decoder = nullptr;
    int ret = av_hwdevice_ctx_create(&m_hw_device_ref, AV_HWDEVICE_TYPE_QSV,
        "auto", NULL, 0);
    if (ret < 0) {
        goto failed;
    }
    // fix: use codecpar; AVStream::codec is long deprecated.
    find_decoder = avcodec_find_decoder(stream->codecpar->codec_id);
    if (!find_decoder) {
        goto failed;
    }
    // Swap the software decoder for its "_qsv" hardware counterpart.
    find_decoder = avcodec_find_decoder_by_name((std::string(find_decoder->name) + "_qsv").c_str());
    if (!find_decoder) {
        goto failed;
    }

    // Find the HW config entry for QSV and remember its pixel format.
    for (int i = 0;; i++) {
        const AVCodecHWConfig* config = avcodec_get_hw_config(find_decoder, i);
        if (!config) {
            fprintf(stderr, "Decoder %s does not support device type %s.\n",
                find_decoder->name, av_hwdevice_get_type_name(AV_HWDEVICE_TYPE_QSV));
            goto failed;
        }
        if (config->device_type == AV_HWDEVICE_TYPE_QSV) {
            m_hw_pix_fmt = config->pix_fmt;
            break;
        }
    }

    decoder_ctx = avcodec_alloc_context3(find_decoder);
    if (!decoder_ctx)
        goto failed;
    if (avcodec_parameters_to_context(decoder_ctx, stream->codecpar) < 0)
        goto failed;
    // GetHwFormat (invoked by avcodec_open2) recovers `this` through opaque.
    decoder_ctx->opaque = this;
    decoder_ctx->get_format = Decoder4GPU::GetHwFormat;
    ret = avcodec_open2(decoder_ctx, NULL, NULL);
    if (ret < 0)
        goto failed;
    return decoder_ctx;
failed:
    m_hw_pix_fmt = AV_PIX_FMT_NONE;
    av_buffer_unref(&m_hw_device_ref);  // also resets the pointer to NULL
    avcodec_free_context(&decoder_ctx);
    return nullptr;
}

bool Decoder4GPU::init() {
    int ret;
    int i;
    FreeResource();
    FreeGpuResource();
    //https://developer.nvidia.com/ffmpeg
    enum AVHWDeviceType type = av_hwdevice_find_type_by_name("qsv");
    if (type == AV_HWDEVICE_TYPE_NONE) {
        fprintf(stderr, "Device type cuda is not supported.\n");
        fprintf(stderr, "Available device types:");
        while((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
            fprintf(stderr, " %s", av_hwdevice_get_type_name(type));
        fprintf(stderr, "\n");
        return false;
    }

    m_input_avfmt_ctx = avformat_alloc_context();//申请一个AVFormatContext结构的内存,并进行简单初始化
    m_input_avfmt_ctx->interrupt_callback.callback = interrupt_callback;
    m_input_avfmt_ctx->interrupt_callback.opaque = &m_input_runner;

    m_input_runner.lasttime = time(NULL);
    av_dict_set(&m_opts, "rtsp_transport",  "tcp", 0); //rtsp默认采用UDP,容易丢包，造成花屏，这里改为TCP
    //开流超时设计
    av_dict_set(&m_opts, "stimeout", "3000000", 0);//单位us 也就是这里设置的是3s
    av_dict_set(&m_opts, "rw_timeout", "3000", 0);//单位:ms
    int result=avformat_open_input(&m_input_avfmt_ctx, m_channel_set.rtsp_url.c_str(),nullptr,&m_opts);
    if (result<0){
        m_msg = m_rtsp_info.ip +",can't open video stream,"+g_ffmpeg_err;
        //LOG4CPLUS_ERROR(g_logger,m_msg);
        return false;
    }

    //获取视频流信息
    result=avformat_find_stream_info(m_input_avfmt_ctx,nullptr);
    if (result<0){
        m_msg = m_rtsp_info.ip +",can't get stream info,"+g_ffmpeg_err;
        LOG4CPLUS_ERROR(g_logger,m_msg);
        return false;
    }
    //Output Info-----------------------------
    printf("---------------- File Information ---------------\n");
    av_dump_format(m_input_avfmt_ctx, 0, m_channel_set.rtsp_url.c_str(), 0);
    printf("-------------------------------------------------\n");

   
    /* find index of the video stream  and returns the decoder for the selected stream*/
    ret = av_find_best_stream(m_input_avfmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &m_avcodec, 0);
    if (ret < 0) {
        m_msg = m_rtsp_info.ip +",Cannot find a video stream in the input file,"+g_ffmpeg_err;
        //LOG4CPLUS_ERROR(g_logger,m_msg);
        return false;
    }
    m_video_stream_index = ret;

    AVStream* video = m_input_avfmt_ctx->streams[m_video_stream_index];

    //intel GPU
    if (type == AV_HWDEVICE_TYPE_QSV)
    {
        m_decoder_ctx = GetQsvDecoder(video);
        if (m_decoder_ctx)
            HwQsvDecoderInit(m_decoder_ctx);
        else
        {
            m_msg = "cant't find Decoder";
            LOG4CPLUS_ERROR(g_logger, m_msg);
            return false;
        }
    }
    else
    {
        if (!(m_decoder_ctx = avcodec_alloc_context3(m_avcodec)))
        {
            m_msg = m_rtsp_info.ip + ",avcodec_alloc_context3 failed," + g_ffmpeg_err;
            //LOG4CPLUS_ERROR(g_logger,m_msg);
            return false;
        }

        if (avcodec_parameters_to_context(m_decoder_ctx, video->codecpar) < 0)
            return false;

        for (i = 0;; i++) {
            //Retrieve supported hardware configurations for a codec
            const AVCodecHWConfig* config = avcodec_get_hw_config(m_avcodec, i);
            if (!config) {
                fprintf(stderr, "Decoder %s does not support device type %s.\n",
                    m_avcodec->name, av_hwdevice_get_type_name(type));
                return false;
            }
            if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX &&
                config->device_type == type) {
                //found the hw for type,找到了硬件解码器，记录对应的AVPixelFormat
                m_hw_pix_fmt = config->pix_fmt;
                break;
            }
        }
        //create a AVBufferRef for m_decoder_ctx
        if (hw_decoder_init(m_decoder_ctx, type) < 0)
            return false;

        if ((ret = avcodec_open2(m_decoder_ctx, m_avcodec, NULL)) < 0) {
            //LOG4CPLUS_ERROR(g_logger,m_rtsp_info.ip + ",Failed to open codec for stream #" + to_string(m_video_stream_index)+g_ffmpeg_err);
            return false;
        }
    }

    AVRational fps_avrational = m_input_avfmt_ctx->streams[m_video_stream_index]->avg_frame_rate; //帧率
    if (fps_avrational.den != 0)
    {
        m_fps = fps_avrational.num / fps_avrational.den;
    }
    //获取视频流的分辨率大小
    m_video_width=m_decoder_ctx->width;
    m_video_height=m_decoder_ctx->height;

    if (!(m_avframe = av_frame_alloc())) {
        //LOG4CPLUS_ERROR(g_logger, m_rtsp_info.ip + " Can not alloc frame,"+g_ffmpeg_err);
        return false;
    }

    DisplayInit(m_video_width, m_video_height);
    


    HANDLE hThread = CreateThread(NULL, 0, StartFunc, (void*)this, 0, NULL);
    //LOG4CPLUS_INFO(g_logger,m_rtsp_info.ip + " init stream succussfully:"+  to_string(m_video_width) + "x" + to_string(m_video_height) +","+to_string(m_fps));
    return true;
}

DWORD WINAPI Decoder4GPU::StartFunc(void* Param)
{
    // CreateThread trampoline: Param carries the Decoder4GPU instance;
    // the thread spends its life inside GetPicture().
    auto* self = static_cast<Decoder4GPU*>(Param);
    self->GetPicture();
    return 0;
}

int Decoder4GPU::GetPicture() {
    // Capture-thread main loop: read packets, drive the decoder from SDL
    // refresh events, and trigger a reconnect (init()) after repeated failures.
    AVPacket packet;
    int ret;
    // NOTE(review): `static` makes the failure counter shared across every
    // Decoder4GPU instance — confirm single-instance use or make it a member.
    static int failedCount = 0;

    while (1)
    {
        m_input_runner.lasttime = time(NULL);  // feed the interrupt-callback watchdog
        if (m_input_avfmt_ctx != NULL)  // after long disconnects the context may be NULL
        {
            // Pull one compressed packet (e.g. H.264) from the demuxer.
            if (av_read_frame(m_input_avfmt_ctx, &packet) < 0) {
                av_packet_unref(&packet); // release immediately or memory keeps growing
                failedCount++;
                if (failedCount > 3) {
                    m_msg = m_rtsp_info.ip + ",no get a packet," + g_ffmpeg_err;
                    LOG4CPLUS_ERROR(g_logger,m_msg );
                    m_err_code = "2301103";
                    failedCount = 0;
                    this->init();  // reconnect
                }
                return false;
            }
        }
        else {
            failedCount++;
            if (failedCount > (10 * 1000000 / SLEEP_US))
            {
                m_msg = m_rtsp_info.ip + ",AVFormatContext is null,will reconnenct," + g_ffmpeg_err;
                LOG4CPLUS_ERROR(g_logger, m_msg);
                m_err_code = "2301103";
                failedCount = 0;
                this->init();
            }
            return false;
        }
        // Track mid-stream resolution changes.
        if (m_video_width != m_decoder_ctx->width || m_video_height != m_decoder_ctx->height) {
            m_video_width = m_decoder_ctx->width;
            m_video_height = m_decoder_ctx->height;
            m_msg = m_rtsp_info.ip + " w*h have changed to " + to_string(m_video_width) + "x" + to_string(m_video_height);
            //LOG4CPLUS_INFO(g_logger,m_msg);
        }
        if (m_video_stream_index == packet.stream_index) {
            SDL_WaitEvent(&m_event);
            if (m_event.type == SFM_REFRESH_EVENT) {
                ret = decode_write(m_decoder_ctx, &packet);
                (void)ret;  // decode errors are simply retried on the next packet
            }
            else if (m_event.type == SDL_KEYDOWN) {
                // Space toggles pause.
                cout << "pause key" << endl;
                if (m_event.key.keysym.sym == SDLK_SPACE)
                    m_thread_pause = !m_thread_pause;
            }
            else if (m_event.type == SDL_QUIT) {
                m_thread_exit = 1;
            }
            else if (m_event.type == SFM_BREAK_EVENT) {
                av_packet_unref(&packet);  // fix: packet leaked on the break path
                break;
            }
        }

        av_packet_unref(&packet); // release or memory keeps growing
    }
    return 0;  // fix: falling off the end of a non-void function is UB
}



void Decoder4GPU::FreeGpuResource(void) {
    // The CPU path lets AVFormatContext own these objects; the GPU path
    // allocates them itself, so the codec context and the HW device reference
    // must both be released here. Both FFmpeg helpers tolerate a NULL pointee
    // and reset the pointer themselves.
    avcodec_free_context(&m_decoder_ctx);
    av_buffer_unref(&m_hw_device_ref);
    m_decoder_ctx = NULL;
    m_hw_device_ref = NULL;
}

// NOTE(review): file-scope device reference; the identically named parameter
// of set_hwframe_ctx below shadows it. Appears unused in this translation
// unit — confirm before removing.
static AVBufferRef *hw_device_ctx = NULL;

static int set_hwframe_ctx(AVCodecContext *ctx, AVBufferRef *hw_device_ctx,int width,int height)
{
    AVBufferRef *hw_frames_ref;
    AVHWFramesContext *frames_ctx = NULL;
    int err = 0;

    if (!(hw_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx))) {
        fprintf(stderr, "Failed to create CUDA frame context.\n");
        return -1;
    }
    frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data);
    frames_ctx->format    = AV_PIX_FMT_CUDA;
    frames_ctx->sw_format = AV_PIX_FMT_NV12;
    frames_ctx->width     = width;
    frames_ctx->height    = height;
    frames_ctx->initial_pool_size = 20;
    if ((err = av_hwframe_ctx_init(hw_frames_ref)) < 0) {
        fprintf(stderr, "Failed to initialize CUDA frame context."
                        "Error code: %d\n",err);
        av_buffer_unref(&hw_frames_ref);
        return err;
    }
    ctx->hw_frames_ctx = av_buffer_ref(hw_frames_ref);
    if (!ctx->hw_frames_ctx)
        err = AVERROR(ENOMEM);

    av_buffer_unref(&hw_frames_ref);
    return err;
}
