//
// Created by gogoqiu on 2022/3/3.
//

/**
 * 最简单的基于FFmpeg的AVDevice例子（读取摄像头）
 * Simplest FFmpeg Device (Read Camera)
 *
 * 雷霄骅 Lei Xiaohua
 * leixiaohua1020@126.com
 * 中国传媒大学/数字电视技术
 * Communication University of China / Digital TV Technology
 * http://blog.csdn.net/leixiaohua1020
 *
 * 本程序实现了本地摄像头数据的获取解码和显示。是基于FFmpeg
 * 的libavdevice类库最简单的例子。通过该例子，可以学习FFmpeg中
 * libavdevice类库的使用方法。
 * 本程序在Windows下可以使用2种方式读取摄像头数据：
 *  1.VFW: Video for Windows 屏幕捕捉设备。注意输入URL是设备的序号，
 *          从0至9。
 *  2.dshow: 使用Directshow。注意作者机器上的摄像头设备名称是
 *         “Integrated Camera”，使用的时候需要改成自己电脑上摄像头设
 *          备的名称。
 * 在Linux下可以使用video4linux2读取摄像头设备。
 * 在MacOS下可以使用avfoundation读取摄像头设备。
 *
 * This software read data from Computer's Camera and play it.
 * It's the simplest example about usage of FFmpeg's libavdevice Library.
 * It's suitable for beginners of FFmpeg.
 * This software support 2 methods to read camera in Microsoft Windows:
 *  1.vfw: VfW (Video for Windows) capture input device.
 *             The filename passed as input is the capture driver number,
 *             ranging from 0 to 9.
 *  2.dshow: Use Directshow. Camera's name in author's computer is
 *             "Integrated Camera".
 * It use video4linux2 to read Camera in Linux.
 * It use avfoundation to read Camera in MacOS.
 *
 */


#include <stdio.h>
//#include <libavutil/time.h>

#define __STDC_CONSTANT_MACROS

#ifdef _WIN32
//Windows
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavdevice/avdevice.h"
#include "SDL/SDL.h"
};
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavutil/time.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <SDL2/SDL.h>
    /*
#include <X11/Xlib.h>
#include <sys/ipc.h>
#include <sys/shm.h>
#include <X11/extensions/XShm.h>
*/
#ifdef __cplusplus
};
#endif
#endif

//Output YUV420P
#define OUTPUT_YUV420P 0
//'1' Use Dshow
//'0' Use VFW
#define USE_DSHOW 0


//Refresh Event
#define SFM_REFRESH_EVENT  (SDL_USEREVENT + 1)

#define SFM_BREAK_EVENT  (SDL_USEREVENT + 2)
//Guard the compatibility macros: newer FFmpeg headers already define
//AV_CODEC_FLAG_GLOBAL_HEADER, and redefining it unconditionally triggers
//a macro-redefinition warning (or error with -Werror).
#ifndef AV_CODEC_FLAG_GLOBAL_HEADER
#define AV_CODEC_FLAG_GLOBAL_HEADER (1 << 22)
#endif
#ifndef CODEC_FLAG_GLOBAL_HEADER
#define CODEC_FLAG_GLOBAL_HEADER AV_CODEC_FLAG_GLOBAL_HEADER
#endif

//Set to 1 by the main/event thread to ask the refresh thread to stop.
//NOTE(review): shared across threads without atomics — acceptable for this
//demo, but SDL_atomic would be stricter.
int thread_exit=0;

//Output FFmpeg's av_log()
//FFmpeg av_log() callback: append every log message to /tmp/av_log.txt.
//Opens and closes the file on each call (simple, but slow for heavy logging).
//The level argument is not filtered; everything is written.
void custom_log(void *ptr, int level, const char *fmt, va_list vl) {
    FILE *log_file = fopen("/tmp/av_log.txt", "a+");
    if (log_file == NULL) {
        return;  //silently drop the message if the log file cannot be opened
    }
    vfprintf(log_file, fmt, vl);
    fflush(log_file);
    fclose(log_file);
}

int sfp_refresh_thread(void *opaque)
{
    thread_exit=0;
    while (!thread_exit) {
        SDL_Event event;
        event.type = SFM_REFRESH_EVENT;
        SDL_PushEvent(&event);
        SDL_Delay(40);
    }
    thread_exit=0;
    //Break
    SDL_Event event;
    event.type = SFM_BREAK_EVENT;
    SDL_PushEvent(&event);

    return 0;
}

//Unused stub — never called anywhere in this file; presumably a placeholder
//for a future "post buffer" step. Safe to remove once confirmed unused.
void postBuf(){

}

//Show Dshow Device
void show_dshow_device(){
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    AVDictionary* options = nullptr;
    av_dict_set(&options,"list_devices","true",0);
    AVInputFormat *iFormat = av_find_input_format("dshow");
    printf("========Device Info=============\n");
    avformat_open_input(&pFormatCtx,"video=dummy",iFormat,&options);
    printf("================================\n");
}

//Show Dshow Device Option
void show_dshow_device_option(){
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    AVDictionary* options = nullptr;
    av_dict_set(&options,"list_options","true",0);
    AVInputFormat *iFormat = av_find_input_format("dshow");
    printf("========Device Option Info======\n");
    avformat_open_input(&pFormatCtx,"video=Integrated Camera",iFormat,&options);
    printf("================================\n");
}

//Show VFW Device
void show_vfw_device(){
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    AVInputFormat *iFormat = av_find_input_format("vfwcap");
    printf("========VFW Device Info======\n");
    avformat_open_input(&pFormatCtx,"list",iFormat,nullptr);
    printf("=============================\n");
}

//Show AVFoundation Device
void show_avfoundation_device(){
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    AVDictionary* options = nullptr;
    av_dict_set(&options,"list_devices","true",0);
    AVInputFormat *iFormat = av_find_input_format("avfoundation");
    printf("==AVFoundation Device Info===\n");
    avformat_open_input(&pFormatCtx, "", iFormat, &options);
    printf("=============================\n");
}

//int yuv_width;
//int yuv_height;
//int y_length;
// uv_length;
//Wall-clock time (av_gettime, microseconds) when streaming started; used by
//push_texture() to pace packet writes against real time.
int64_t start_time;
//Output video stream created inside rtmp_format_context by initialize_rtmp().
AVStream* video_stream;

//FLV muxer context for the RTMP output.
AVFormatContext * rtmp_format_context;
//H.264 encoder selected in initialize_rtmp().
AVCodec* tmpCodec;
//Encoder context used by push_texture() to encode outgoing frames.
AVCodecContext* tmpCodecContext;
//Destination RTMP URL — presumably a LAN test server; change for your setup.
#define RTMP_URL "rtmp://192.168.5.200:1955/hls/room1"
static int initialize_rtmp( int width, int height ){

    //avformat_alloc_context();
    avformat_alloc_output_context2( &rtmp_format_context, nullptr, "flv", RTMP_URL );
    //AV_CODEC_ID_H264
    /*
    if ( avcodec_find_encoder( AV_CODEC_ID_H264 ) == nullptr ){
        printf("can't find h264 encode!\n");
        return -1;
    };*/
    //tmpCodec = avcodec_find_encoder( AV_CODEC_ID_FLV1 );
    tmpCodec = avcodec_find_encoder( AV_CODEC_ID_H264 );
    //tmpCodec = avcodec_find_encoder( AV_CODEC_ID_H265 );
    if (!tmpCodec) {
        //LOGE("Can not find encoder!\n");
        //
        printf("can't find h264 encode!\n");
        return -1;
    }
    //
    tmpCodecContext = avcodec_alloc_context3(tmpCodec);
    if (!tmpCodecContext) {
        //LOGE("Could not allocate video codec context\n");
        return -1;
    }
    //rtmp_format_context->
    //
    tmpCodecContext->pix_fmt =  AV_PIX_FMT_YUV420P;//PIX_FMT_YUV420P新版加
    //tmpCodecContext->pix_fmt = AV_PIX_FMT_;
            //
    tmpCodecContext->width = width;
    tmpCodecContext->height = height;
    tmpCodecContext->time_base.num = 1;
    tmpCodecContext->time_base.den = 25;
    tmpCodecContext->bit_rate = 400000;
    tmpCodecContext->gop_size = 250;
    // Some formats want stream headers to be separate.
    if (rtmp_format_context->oformat->flags & AVFMT_GLOBALHEADER)
        tmpCodecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
    tmpCodecContext->qmin = 10;
    tmpCodecContext->qmax = 51;
    tmpCodecContext->max_b_frames = 0;

    AVDictionary *param = nullptr;
    av_opt_set(tmpCodecContext->priv_data, "preset", "ultrafast", 0);
    av_opt_set(tmpCodecContext->priv_data, "tune", "zerolatency", 0);
    AVDictionary *opts = nullptr;
    //av_dict_set(&opts, "b", "2.5M", AV_DICT_MATCH_CASE );
    av_dict_set(&opts, "b", "2.5M", 0 );
    //av_log();
    /*
     * https://github.com/tanersener/ffmpeg-kit/issues/287
     * h264_v4l2m2m is a video codec based on Video4Linux2 supported hw devices.
     * You must have a hw device to be able use it. Could not find a valid device error means you don't have it.

    ffmpeg supports x264 and openh264 h264 encoders on Android.
     x264 is included in GPL licensed packages. You can either use one of those packages or build ffmpeg-kit yourself with openh264.

     https://www.bilibili.com/read/cv7998088
     MX系列显卡，MX110、MX130、MX150、MX230、MX250、MX330、MX350、MX450等等,是只能打游戏的显卡，下面的两种都用不了没有NVENC（编码） ，NVDEC（解码）
    我当时就应该买GTX950M不买MX150了，干，什么视频加速，直播录屏的功能都用不了（老黄真抠） 作者：Challenger-Li https://www.bilibili.com/read/cv7998088 出处：bilibili

     chrome h264
     chrome其实默认已经支持hevc了，但开关默认是关闭了，我们需要将相关的宏打开，修改相应的探测函数。
    rx570   hevc
     Intel GPU及NVIDIA GPU对视频编解码支持情况
     https://blog.csdn.net/andrew57/article/details/79267950
     8代酷睿Coffee Lake首测 Intel i5 8250U移动CPU处理器性能对比评测
     gogoqiu@notepad:~$ lspci|grep VGA
    00:02.0 VGA compatible controller: Intel Corporation UHD Graphics 620 (rev 07)

     */
    if (avcodec_open2(tmpCodecContext, tmpCodec, &opts ) < 0) {
        //LOGE("Failed to open encoder!\n");
        return -1;
    }
    //
    video_stream = avformat_new_stream(rtmp_format_context, tmpCodec);//avformat_new_stream创建流通道
    if (video_stream == nullptr) {
        return -1;
    }
    video_stream->time_base.num = 1;
    video_stream->time_base.den = 30;
    //
    video_stream->codec = tmpCodecContext;

    //Open output URL,set before avformat_write_header() for muxing
    if (avio_open(&rtmp_format_context->pb, RTMP_URL, AVIO_FLAG_READ_WRITE) < 0) {
        //LOGE("Failed to open output file!\n");
        return -1;
    }

    //Write File Header
    avformat_write_header(rtmp_format_context, nullptr);

    start_time = av_gettime();
    return 0;
}

int frameCount;   //number of frames submitted to the encoder; used to synthesize PTS

/*
 * Encode one YUV420P frame with tmpCodecContext, timestamp the resulting
 * packet and write it to the RTMP output (rtmp_format_context).
 * Sleeps when the packet is ahead of wall-clock time so the stream is paced
 * in real time. Returns 0 on success (or when the encoder merely buffered
 * the frame), -1 on encode error.
 * Fix: previously the return value of avcodec_encode_video2() was ignored,
 * so encoder failures were silently swallowed.
 */
static int push_texture( AVFrame *tmpFrame ){
    int ret;
    int enc_got_frame;
    frameCount++;
    AVPacket enc_pkt;
    enc_pkt.data = nullptr;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);   //initialise the remaining AVPacket fields

    ret = avcodec_encode_video2( tmpCodecContext, &enc_pkt, tmpFrame, &enc_got_frame);
    if (ret < 0) {
        printf("Encode Error.\n");
        return -1;
    }

    if (enc_got_frame == 1) {
        enc_pkt.stream_index = video_stream->index;

        //Write PTS
        AVRational time_base = rtmp_format_context->streams[0]->time_base;//{ 1, 1000 };
        AVRational r_framerate1 = {60, 2};//{ 50, 2 };
        AVRational time_base_q = {1, AV_TIME_BASE};
        //Duration between 2 frames, in internal AV_TIME_BASE units (us)
        int64_t calc_duration = (double) (AV_TIME_BASE) * (1 / av_q2d(r_framerate1));
        //Rescale the synthetic frame clock into the stream's time base.
        enc_pkt.pts = av_rescale_q(frameCount * calc_duration, time_base_q, time_base);
        enc_pkt.dts = enc_pkt.pts;
        enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
        enc_pkt.pos = -1;

        //Delay: sleep if this packet is ahead of wall-clock time since start.
        int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
        int64_t now_time = av_gettime() - start_time;
        if (pts_time > now_time)
            av_usleep(pts_time - now_time);
        //Write the compressed packet to the muxer (interleaved).
        ret = av_interleaved_write_frame(rtmp_format_context, &enc_pkt);
        av_free_packet(&enc_pkt);
    }
    return 0;
}

/*
 * av_image_fill_arrays
 * av_image_fill_arrays
 * AVDictionary *_dic = nullptr;
av_dict_set (&_dic, "vcodec", "mjpeg", 0);
int _ret = avformat_open_input (&_fmt_ctx, "video=xxxcamera", _ipt_fmt, _dic);
ffmpeg如何以mjpeg方式打开摄像头？
作者：Fawdlstty
链接：https://www.zhihu.com/question/318684293/answer/642421895
来源：知乎
著作权归作者所有。商业转载请联系作者获得授权，非商业转载请注明出处。
 https://github.com/intel/media-driver/
 https://zhuanlan.zhihu.com/p/372361709
 Ubuntu20.04 ffmpeg添加 Intel核显QSV加速支持
 https://cloud.tencent.com/developer/article/1386851
 Ubuntu Intel显卡驱动安装 （Ubuntu 14.04--Ubuntu 16.10 + Intel® Graphics Update Tool）
 How to use Intel H.264 HW encoder with NoMachine on Linux platforms
 https://knowledgebase.nomachine.com/AR09O00938
 FFmpeg 开启QSV硬解加速
 https://blog.csdn.net/qq_25884511/article/details/84647828
 H.264转码加速：NVENC大战Quick Sync - 超能网
 */
/*
 * Entry point: open the local camera (dshow/vfw on Windows, video4linux2 on
 * Linux, avfoundation on macOS), decode each captured packet, convert the
 * frame to YUV420P, render it with SDL2 and push it to the RTMP server
 * configured in initialize_rtmp(). A helper SDL thread paces the loop by
 * posting SFM_REFRESH_EVENT every 40 ms.
 */
int main(int argc, char* argv[])
{
    AVFormatContext	*pFormatCtx;
    int				i, videoStreamIndex;
    //render codec context
    AVCodecContext	*pDeviceVideoCodecCtx;
    AVCodec			*pCodec;

    av_log_set_callback(custom_log);
    av_register_all();
    avformat_network_init();

    //Open File
    //char filepath[]="src01_480x272_22.h265";
    //avformat_open_input(&pFormatCtx,filepath,NULL,NULL)

    //Register Device
    avdevice_register_all();

    //
    pFormatCtx = avformat_alloc_context();

    //NOTE(review): the pH264Codec / pc / pcc encoder probed and opened below
    //is never used again after avcodec_open2() — actual pushing uses
    //tmpCodecContext from initialize_rtmp(). This looks like leftover
    //hardware-encoder (VAAPI/V4L2M2M) experimentation; confirm and remove.
    //AVCodec *  pH264Codec = avcodec_find_encoder_by_name("h264_qsv");
    AVCodec *  pH264Codec = avcodec_find_encoder_by_name("h264_vaapi");
    //AVCodec *  pH264Codec = avcodec_find_encoder_by_name("libx264");
    //AVCodec *  pH264Codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if(nullptr == pH264Codec)
    {
        printf("%s", "avcodec_find_encoder failed");
        return  -1;
    }

    AVCodec * pc = avcodec_find_encoder( AV_CODEC_ID_H264 );
    pc = avcodec_find_encoder_by_name("h264_vaapi");
    pc = avcodec_find_encoder_by_name( "h264_v4l2m2m");
    if (!pc) {
        //goto close_muxer;
        return -1;
    }

    // prepare encoder
    //https://github.com/ccrisan/motioneyeos/issues/1921
    AVCodecContext * pcc = avcodec_alloc_context3(pc);
    pcc->pix_fmt = pc->pix_fmts[0];
    //pcc->pix_fmt = AV_PIX_FMT_YUV420P;
    pcc->codec_id = pc->id;
    pcc->codec_type = pc->type;
    pcc->time_base.num = 1;
    pcc->time_base.den = 1;
    pcc->width = 640;
    pcc->height = 480;
    //avcodec_open2 probing device /dev/video0
    //A hardware frames reference is required to associate the encoding device.
//#https://wiki.debian.org/HardwareVideoAcceleration#:~:text=Driver%20selection%20can%20be%20overridden%20by%20setting%20the,use%20the%20driver%20from%20intel-media-va-driver%20on%20Debian%2010%2FBuster%29.
//export LIBVA_DRIVER_NAME=iHD
    int ok = avcodec_open2(pcc, pc, nullptr );
    if (ok != 0) {
        //goto free_encoder;
        return -1;
    }
//Windows
#ifdef _WIN32

    //Show Dshow Device
	show_dshow_device();
	//Show Device Options
	show_dshow_device_option();
    //Show VFW Options
    show_vfw_device();

#if USE_DSHOW
	AVInputFormat *ifmt=av_find_input_format("dshow");
	//Set own video device's name
	if(avformat_open_input(&pFormatCtx,"video=Integrated Camera",ifmt,NULL)!=0){
		printf("Couldn't open input stream.\n");
		return -1;
	}
#else
	AVInputFormat *ifmt=av_find_input_format("vfwcap");
	if(avformat_open_input(&pFormatCtx,"0",ifmt,NULL)!=0){
		printf("Couldn't open input stream.\n");
		return -1;
	}
#endif
#elif defined linux
    //Linux
    AVInputFormat *ifmt = av_find_input_format("video4linux2");
	//avformat_open_input, avformat_alloc_output_context2
	//int avformat_open_input(AVFormatContext **ps, const char *url,
	//                  ff_const59 AVInputFormat *fmt, AVDictionary **options);
	//int avformat_alloc_output_context2(AVFormatContext **ctx, ff_const59 AVOutputFormat *oformat,
    //                                   const char *format_name, const char *filename);
    if(avformat_open_input( &pFormatCtx,"/dev/video0",ifmt,nullptr )!=0){
        printf("Couldn't open input stream.\n");
        return -1;
    }
#else
    show_avfoundation_device();
    //Mac
    AVInputFormat *ifmt=av_find_input_format("avfoundation");
    //Avfoundation
    //[video]:[audio]
    if(avformat_open_input(&pFormatCtx,"0",ifmt,NULL)!=0){
        printf("Couldn't open input stream.\n");
        return -1;
    }
#endif

    if(avformat_find_stream_info(pFormatCtx,nullptr)<0){
        printf("Couldn't find stream information.\n");
        return -1;
    }
    //Locate the first video (and, informationally, audio) stream.
    videoStreamIndex=-1;
    int audioStreamIndex =-1;
    //How to change the stream format?
    for(i=0; i<pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            //break;
        }
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            audioStreamIndex = i;
            //break;
        }
    }
    if(videoStreamIndex == -1){
        printf("Couldn't find a video stream.\n");
        return -1;
    }
    //
    av_dump_format(pFormatCtx, videoStreamIndex, "video4linux2", 0 );
    /*
     * Input #0, video4linux2,v4l2, from '(null)':
  Duration: N/A, start: 1327349.665138, bitrate: 147456 kb/s
    Stream #0:0: Video: rawvideo (YUY2 / 0x32595559), yuyv422, 1280x720, 147456 kb/s, 10 fps, 10 tbr, 1000k tbn, 1000k tbc
     * */
    //every stream have its codec context
    //find it
    pDeviceVideoCodecCtx = pFormatCtx->streams[videoStreamIndex]->codec;
    //pDeviceVideoCodecCtx->
    //source codec
    pCodec = avcodec_find_decoder(pDeviceVideoCodecCtx->codec_id);
    if(pCodec==nullptr){
        //can't parse
        printf("Codec not found.\n");
        return -1;
    }
    //pCodec: raw video
    //
    if(avcodec_open2(pDeviceVideoCodecCtx, pCodec, nullptr) < 0)
    {
        printf("Could not open codec.\n");
        return -1;
    }
    //code name: rawvideo
    printf( "code name: %s\n", pCodec->name );

    int video_width,video_height;
    //SDL_Surface *screen;
    video_width = pDeviceVideoCodecCtx->width;
    video_height = pDeviceVideoCodecCtx->height;
    //Set up the RTMP encoder/muxer before entering the capture loop.
    if( initialize_rtmp( video_width, video_height ) <0 ){
        printf("Could not initialize rtmp.\n");
        return -1;
    }
    //initialize the rtmp pusher

    //pSrcFrame receives decoded camera frames; pFrameYUV is the YUV420P
    //conversion target backed by out_buffer.
    AVFrame	*pSrcFrame,*pFrameYUV;
    pSrcFrame=av_frame_alloc();
    pFrameYUV=av_frame_alloc();
    //av_image_get_buffer_size
    auto *out_buffer=(unsigned char *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P,
                                video_width, video_height ));
    //av_image_fill_arrays
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
                   video_width, video_height );
    //av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize,out_buffer,
    //                     AV_PIX_FMT_YUV420P,pDeviceVideoCodecCtx->width, pDeviceVideoCodecCtx->height,1);
    //SDL----------------------------
    if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
        printf( "Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }

    //SDL_Renderer *sdlRenderer;
    //
    //screen = SDL_SetVideoMode(video_width, video_height, 0,0);
    SDL_Window *screen = SDL_CreateWindow("Simplest FFmpeg Read Camera",
                                          SDL_WINDOWPOS_UNDEFINED,
                                          SDL_WINDOWPOS_UNDEFINED,
                                          video_width, video_height,
                                          SDL_WINDOW_OPENGL );
    if(!screen) {
        printf("SDL: could not set video mode - exiting:%s\n",SDL_GetError());
        return -1;
    }
    SDL_Renderer *sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
    //
    //SDL_Overlay *bmp;
    //bmp = SDL_CreateYUVOverlay(pDeviceVideoCodecCtx->width, pDeviceVideoCodecCtx->height,SDL_YV12_OVERLAY, screen);

    //Stream #0:0: Video: rawvideo (YUY2 / 0x32595559), yuyv422, 1280x720, 147456 kb/s, 10 fps, 10 tbr, 1000k tbn, 1000k tbc
    //IYUV matches the AV_PIX_FMT_YUV420P frames produced by sws_scale below.
    SDL_Texture *sdlTexture = SDL_CreateTexture(sdlRenderer,
                                                //SDL_PIXELFORMAT_ARGB8888,
                                                //SDL_PIXELFORMAT_YUY2,
                                                SDL_PIXELFORMAT_IYUV,
                                                SDL_TEXTUREACCESS_STREAMING,
                                                video_width, video_height );
    //packet->frame->texture
    SDL_Rect rect;
    rect.x = 0;
    rect.y = 0;
    rect.w = video_width;
    rect.h = video_height;
    //SDL End------------------------
    int ret, got_picture;
    //
    auto *packet=(AVPacket *)av_malloc(sizeof(AVPacket));

#if OUTPUT_YUV420P
    FILE *fp_yuv=fopen("output.yuv","wb+");
#endif

    //Converter from the camera's native pixel format to YUV420P.
    struct SwsContext *img_convert_ctx;
    img_convert_ctx = sws_getContext( video_width, video_height,
                                     pDeviceVideoCodecCtx->pix_fmt,
                                      video_width, video_height,
                                     AV_PIX_FMT_YUV420P, SWS_BICUBIC, nullptr, nullptr, nullptr);
    //------------------------------
    SDL_Thread *video_tid = SDL_CreateThread( sfp_refresh_thread, "video", nullptr);
    //
    //SDL_WM_SetCaption("Simplest FFmpeg Read Camera",nullptr);
    //Event Loop
    SDL_Event event;
    //XShmDetach();
    //while(true)
    for (;;) {
        //Wait
        SDL_WaitEvent(&event);
        if(event.type==SFM_REFRESH_EVENT){
            //------------------------------
            if(av_read_frame(pFormatCtx, packet)>=0){
                //packet
                //??
                if(packet->stream_index == videoStreamIndex){
                    //how to update to sdl surface
                    //prepare:
                    //create window
                    //createTexture
                    //how to update to rtmp server
                    //prepare:
                    //  pSrcFrame=>pFrameYUV
                    //
                    //first get the frame( pSrcFrame )
                    //sws_scale( pFrameYUV )
                    //from the stream by the device
                    //use the codec context to decode the packet
                    ret = avcodec_decode_video2(pDeviceVideoCodecCtx, pSrcFrame, &got_picture, packet);
                    //pSrcFrame
                    if(ret < 0){
                        printf("Decode Error.\n");
                        return -1;
                    }
                    if(got_picture){
                        /*
                        SDL_LockYUVOverlay(bmp);
                        pFrameYUV->data[0]=bmp->pixels[0];
                        pFrameYUV->data[1]=bmp->pixels[2];
                        pFrameYUV->data[2]=bmp->pixels[1];
                        pFrameYUV->linesize[0]=bmp->pitches[0];
                        pFrameYUV->linesize[1]=bmp->pitches[2];
                        pFrameYUV->linesize[2]=bmp->pitches[1];
                        */
                        /*
                        int sws_scale(struct SwsContext *c, const uint8_t *const srcSlice[],
                                      const int srcStride[], int srcSliceY, int srcSliceH,
                                      uint8_t *const dst[], const int dstStride[]);*/
                        sws_scale(img_convert_ctx, (const unsigned char* const*)pSrcFrame->data,
                                  pSrcFrame->linesize, 0, pDeviceVideoCodecCtx->height,
                                  pFrameYUV->data, pFrameYUV->linesize);

                        pFrameYUV->format = AV_PIX_FMT_YUV420P;
                        pFrameYUV->width = video_width;
                        pFrameYUV->height = video_height;

                        //Encode the converted frame and send it to the RTMP server.
                        push_texture( pFrameYUV );
#if OUTPUT_YUV420P
                        int y_size=pDeviceVideoCodecCtx->width*pDeviceVideoCodecCtx->height;
						fwrite(pFrameYUV->data[0],1,y_size,fp_yuv);    //Y
						fwrite(pFrameYUV->data[1],1,y_size/4,fp_yuv);  //U
						fwrite(pFrameYUV->data[2],1,y_size/4,fp_yuv);  //V
#endif

#if 0
                        SDL_UpdateTexture( sdlTexture, NULL, pFrameYUV->data[0], pFrameYUV->linesize[0] );
#else
                        //Upload the three YUV planes to the streaming texture.

                        SDL_UpdateYUVTexture(sdlTexture, &rect,
                                             pFrameYUV->data[0], pFrameYUV->linesize[0],
                                             pFrameYUV->data[1], pFrameYUV->linesize[1],
                                             pFrameYUV->data[2], pFrameYUV->linesize[2]);
                        /*
                        SDL_UpdateYUVTexture(sdlTexture, &rect,
                                             pSrcFrame->data[0], pSrcFrame->linesize[0],
                                             pSrcFrame->data[1], pSrcFrame->linesize[1],
                                             pSrcFrame->data[2], pSrcFrame->linesize[2]);
                        */
#endif
                        //SDL_UnlockYUVOverlay(bmp);
                        //SDL_DisplayYUVOverlay(bmp, &rect);
                        //av_frame_free( &pFrameYUV );
                        SDL_RenderClear( sdlRenderer );
                        SDL_RenderCopy( sdlRenderer, sdlTexture,  nullptr, &rect );
                        SDL_RenderPresent( sdlRenderer );
                    }
                }
                av_free_packet(packet);
            }else{
                //Exit Thread
                thread_exit=1;
            }
        }else if(event.type==SDL_QUIT){
            thread_exit=1;
        }else if(event.type==SFM_BREAK_EVENT){
            break;
        }
    }
    //sws_freeContext(img_convert_ctx);

#if OUTPUT_YUV420P
    fclose(fp_yuv);
#endif

    SDL_Quit();

    //NOTE(review): out_buffer, pFrameYUV, pSrcFrame, packet, img_convert_ctx
    //and the RTMP side (trailer + avio_close) are never released — harmless at
    //process exit, but worth cleaning up if this is ever library-ified.
    //av_free(out_buffer);
    //av_free(pFrameYUV);
    avcodec_close(pDeviceVideoCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}