#include <iostream>
#include "rtmp_pusher.h"
#include "http_server.h"
#include <thread> 

// FFmpeg is written in pure C: when including its headers from a C++
// translation unit they must be wrapped in extern "C" to disable name mangling.
extern "C"
{
#include <libavformat/avformat.h>
#include <libavdevice/avdevice.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavfilter/avfilter.h>
#include <libswscale/swscale.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
#include "mq_msg.h"
#include "process_frame.h"

// Threshold (1 MiB) for the output AVIO buffer: the push loop flushes the
// muxer's I/O buffer once it grows past this size.
static const int MAX_BUFFER_SIZE = 1024*1024;

void rtmp_run()
{
    std::cout << "rtmp_run" << std::endl;
    MqMsgRecv mq_recv("/http2rtmp");
    bool start_yolo = false;

    //初始化网络
    avformat_network_init();
    //初始化设备
    avdevice_register_all();
    //输入封装器
    AVFormatContext* infmt_ctx = NULL;
    
    //输出封装器
    AVFormatContext* outfmt_ctx = NULL;
    
    //视频输入格式
    AVInputFormat* ifmt =NULL;
    
    //通过v4l2框架来获取视频输入格式
    ifmt = av_find_input_format("video4linux2");

    //视频输入设备
    char *in_filename  = "/dev/video12";
    
    //视频输出设备
    char *out_filename = "rtmp://localhost/live/stream";

    AVDictionary *options = nullptr;
    av_dict_set(&options, "video_size", "640x480", 0);
    av_dict_set(&options, "framerate", "30", 0);
    av_dict_set(&options, "pixel_format", "mjpeg", 0);
    
    //打开视频设备
    printf("opening video device\n");
    if (0 > avformat_open_input(&infmt_ctx, in_filename, ifmt, &options)) {
        printf("failed open input file\n");
        return;
    }
    
    //读取设备信息
    printf("reading video device info\n");
    if (0 > avformat_find_stream_info(infmt_ctx, NULL)) {
        printf("failed find stream info\n");
        avformat_close_input(&infmt_ctx);
        return;
    }
    
    
    //对流（Stream）的封装和抽象
    AVStream *in_stream = NULL;
    AVStream *out_stream = NULL;
    
    //视频流和音频流的标志
    int videoindex=-1;
    
    int i=0;
    int ret;
    //查找视频||音频流
    printf("finding video stream\n");
    for (i = 0; i < infmt_ctx->nb_streams; i++)
    {
        //Create output AVStream according to input AVStream
        
        if (infmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoindex = i;
        }
        else
        {
            break;
        }
    }
    
    if (videoindex == -1) 
    {
        printf("input video stream not exist\n");
        return;
    }
    
    
    AVCodec* encodec = NULL;
    AVCodec* decodec = NULL;
    
    
    //找到编码器
    printf("finding encoder\n");
    //encodec = avcodec_find_encoder(AV_CODEC_ID_H264);
    encodec = avcodec_find_encoder_by_name("libx264");
    if (!encodec) 
    {
        printf("not find encoder\n");
        avformat_close_input(&infmt_ctx);
        return;
    }
    
    
    AVCodecContext* encodec_ctx = NULL;
    AVCodecContext* decodec_ctx = NULL;
    
    decodec_ctx=infmt_ctx->streams[videoindex]->codec;
    //找到解码器
    printf("finding decoder\n");
    decodec = avcodec_find_decoder(decodec_ctx->codec_id);
    if (!decodec) 
    {
        printf("not find decoder\n");
        avformat_close_input(&infmt_ctx);
        return;
    }
    
    //创建编码器
    printf("creating encodec\n");
    encodec_ctx = avcodec_alloc_context3(encodec);
    if (!encodec_ctx) 
    {
        printf("not alloc context3\n\n");
        avformat_close_input(&infmt_ctx);
        return;
    }
    
    //打开解码器
    printf("opening dencodec\n");
    ret = avcodec_open2(decodec_ctx, decodec, NULL);
    if (ret < 0) {
        char achErrMsg[1024] = {0};
        av_make_error_string(achErrMsg, sizeof(achErrMsg), ret);
        fprintf(stderr, "Could not open codec: %s\n", achErrMsg);
        return;
    }
    
    
    //配置编码器参数
    encodec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; 
    encodec_ctx->codec_id = encodec->id;
    encodec_ctx->thread_count = 8;
    //encodec_ctx->bit_rate = 800000;
    encodec_ctx->width = 640;
    encodec_ctx->height = 480;
    encodec_ctx->time_base = (AVRational){1, 30};    //5是编多少帧就发送，可根据编码速度改变
    encodec_ctx->framerate = (AVRational){30, 1};
    encodec_ctx->gop_size = 250;
    encodec_ctx->max_b_frames = 0;
    encodec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
    
    
    //编码质量和速度
    printf("set encodec param\n");
    av_opt_set(encodec_ctx->priv_data, "preset", "ultrafast", 0);
    av_opt_set(encodec_ctx->priv_data, "tune", "zerolatency", 0);


    AVDictionary *opts = NULL;
    av_dict_set(&opts, "profile", "baseline", 0);
    //av_opt_set(encodec_ctx->priv_data, "crf", "18", 0);
    
    //打开编码器
    printf("opening encodec\n");
    ret = avcodec_open2(encodec_ctx, encodec, &opts);
    if (ret < 0) {
        char achErrMsg[1024] = {0};
        av_make_error_string(achErrMsg, sizeof(achErrMsg), ret);
        fprintf(stderr, "Could not open codec: %s\n", achErrMsg);
        return;
    }
    
    //初始化输出封装器
    printf("outputing context\n");
    ret=avformat_alloc_output_context2(&outfmt_ctx, NULL, "flv", out_filename);
    if (ret != 0) {
        printf("failed alloc output context\n");
        avformat_close_input(&infmt_ctx);
        return;
    } 
    
    //添加视频流
    printf("new stream\n");
    out_stream = avformat_new_stream(outfmt_ctx,NULL);
    if (!out_stream) {
        printf("failed new stream\n");
        avformat_close_input(&infmt_ctx);
        avformat_close_input(&outfmt_ctx);
        return;
    }
    out_stream->codecpar->codec_tag = 0;
    //复制参数
    avcodec_parameters_from_context(out_stream->codecpar, encodec_ctx);
    
    //查看输出封装内容
    printf("====\n");
    av_dump_format(infmt_ctx, 0, "usb camera", 0);
    
    printf("====\n");
    av_dump_format(outfmt_ctx, 0, out_filename, 1);
    
    //打开rtmp的网络输出IO
    printf("opening avio\n");
    ret=avio_open(&outfmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
    if (ret!=0) {
        printf("failed to open outfile\n");
        avformat_close_input(&infmt_ctx);
        avformat_close_input(&outfmt_ctx);
        return;
    }
    
    //写入封装头
    printf("writing av header\n");
    ret=avformat_write_header(outfmt_ctx, NULL);
    if (ret!=0) {
        printf("failed to write header\n");
        avio_close(outfmt_ctx->pb);
        avformat_close_input(&infmt_ctx);
        avformat_close_input(&outfmt_ctx);
        return;
    }
    
    
    AVPacket *dec_pkt,enc_pkt;
    //包裹申请内存
    dec_pkt = (AVPacket *)av_malloc(sizeof(AVPacket)); 
    memset(&enc_pkt, 0, sizeof(enc_pkt));
    //像素格式转换YU420
    struct SwsContext *img_convert_ctx  = NULL;
    img_convert_ctx  = sws_getCachedContext(img_convert_ctx, decodec_ctx->width, decodec_ctx->height,decodec_ctx->pix_fmt, encodec_ctx->width, encodec_ctx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, 0, 0, 0);
    if (!img_convert_ctx)
    {
        printf("fail to sws_getCachedContext\n");
    } 
    
    AVFrame *pFrameYUV,*pFrame ;
    //原始帧
    pFrame = av_frame_alloc();
    //输出帧
    pFrameYUV = av_frame_alloc();
    pFrameYUV->format = AV_PIX_FMT_YUV420P;
    pFrameYUV->width = 640;
    pFrameYUV->height = 480;
    pFrameYUV->pts = 0;
    
    ret = av_frame_get_buffer(pFrameYUV, 1);
    if (ret != 0)
    {
        printf("fail to frame get buffer\n");
        return;
    }
    //开始计时
    int64_t start_time = av_gettime();
    //标记
    int got_picture=0,enc_got_frame=0;
    //每一帧编号
    int vpts = 0;

    int64_t frame_delay = av_rescale_q(1, encodec_ctx->time_base, AV_TIME_BASE_Q);
    int64_t next_frame_time = av_gettime() + frame_delay;

    int yolo_idx = 0;
    
    while(1)
    {
        //每一帧加1
        pFrameYUV->pts = vpts;
        vpts += 1;
        //获取摄像头帧
        ret=av_read_frame(infmt_ctx,dec_pkt);
        if (ret != 0)
        {    
            printf("fail to read_frame\n");
            break;
        }
        
        //解码获取初始图片
        ret = avcodec_decode_video2(infmt_ctx->streams[dec_pkt->stream_index]->codec, pFrame, &got_picture, dec_pkt);
        if(!got_picture)
        {
            printf("123\n");
            av_packet_unref(&enc_pkt);
            continue;
        }
        //h264格式转换
        ret = sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, encodec_ctx->height, pFrameYUV->data, pFrameYUV->linesize);
        if (ret <= 0)
        {
            printf("123\n");
            av_packet_unref(&enc_pkt);
            continue;
        }


        
        // sws_scale(pSwsCtx, (const uint8_t* const*)pFrame->data,
        //                 pFrame->linesize, 0, decodec_ctx->height,
        //                 pFrameBGR->data, pFrameBGR->linesize);

        // int frameSize = avpicture_get_size(AV_PIX_FMT_BGR24, decodec_ctx->width, decodec_ctx->height);
        // cv::Mat img = cv::Mat::zeros(decodec_ctx->height, decodec_ctx->width, CV_8UC3);
        //memcpy(img.data, outBuff, frameSize);

        std::string msgRecvd = mq_recv.Recv();
        if(msgRecvd == "start_yolo") {
            printf("start yolo\n");
            start_yolo = true;
            yolo_idx = 0;
        } else if(msgRecvd == "stop_yolo") {
            printf("stop yolo\n");
            start_yolo = false;
            MqMsgSend mq_msg_send("/rtmp2http");
            mq_msg_send.Send("");
        }

        if(start_yolo)
        {
            FrameProcessor* pFrameProcessor = FrameProcessor::getInstance();
            pFrameProcessor->ProcessYUVFrame(pFrameYUV);
            yolo_idx++;
            if(yolo_idx > 450)
            {
                
            }
        }

        
        //输出帧编码
        ret = avcodec_send_frame(encodec_ctx, pFrameYUV);
        if (ret != 0)
        {
            printf("123\n");
            av_packet_unref(&enc_pkt);
            continue;
        }
        //打包到输出包裹
        ret = avcodec_receive_packet(encodec_ctx, &enc_pkt);
        if (ret != 0 || enc_pkt.size > 0)
        {
                //cout << "*" << pack.size << flush;
                
        }
        
        //推流
        // enc_pkt.pts = av_rescale_q(enc_pkt.pts, encodec_ctx->time_base, out_stream->time_base);
        // enc_pkt.dts = av_rescale_q(enc_pkt.dts, encodec_ctx->time_base, out_stream->time_base);
        // enc_pkt.duration = av_rescale_q(enc_pkt.duration, encodec_ctx->time_base, out_stream->time_base);

        enc_pkt.pts = av_rescale_q(pFrameYUV->pts, encodec_ctx->time_base, out_stream->time_base);
        enc_pkt.dts = av_rescale_q(pFrameYUV->pts, encodec_ctx->time_base, out_stream->time_base);
        enc_pkt.duration = av_rescale_q(1, encodec_ctx->time_base, out_stream->time_base);
        
        //发送到服务器
        ret = av_interleaved_write_frame(outfmt_ctx, &enc_pkt);
        if (ret < 0) {
            fprintf(stderr, "Error muxing packet\n");
            break;
        }	

        if (outfmt_ctx->pb->buffer_size > MAX_BUFFER_SIZE) 
            avio_flush(outfmt_ctx->pb);

        //查看第几帧
        //printf("%d\n",vpts);
        av_packet_unref(&enc_pkt);

        // 控制帧率
        // int64_t current_time = av_gettime();
        // if (current_time < next_frame_time) {
        // 	av_usleep(next_frame_time - current_time);
        // }
        // next_frame_time += frame_delay;
        // printf("Frame %d sent, pts: %ld, dts: %ld\n", vpts, enc_pkt.pts, enc_pkt.dts);
        
    }
    avio_close(outfmt_ctx->pb);
    avformat_close_input(&infmt_ctx);
    avformat_close_input(&outfmt_ctx);
    return;
}

/// Starts the RTMP push pipeline on a detached background thread.
/// @return 0 on success, -1 if the thread could not be created.
int rtmp_stream_test()
{
    std::cout << "rtmp_stream_test" << std::endl;
    std::thread rtmp_thread(&rtmp_run);

    if (!rtmp_thread.joinable()) {
        std::cerr << "Failed to create thread" << std::endl;
        return -1;
    }
    // Detached: the streaming loop runs for the lifetime of the process.
    rtmp_thread.detach();
    return 0;  // BUG FIX: falling off the end of a non-void function is UB
}