/*
 * @Description: RTSP (BGR + IR) camera fusion and RTMP stream pusher
 * @Version: 2.0
 * @Author: xudongxu
 * @Date: 2019-12-05 14:18:22
 * @LastEditors: xudongxu
 * @LastEditTime: 2019-12-08 10:42:48
 */
#include <future>
#include <iostream>
#include <map>
#include <opencv2/opencv.hpp>
#include <regex>
#include <signal.h>
#include <sys/time.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
}
#include <unistd.h>
using namespace cv;
using namespace std;
#define ENABLE 1

// Per-camera calibration table: for each camera IP, the top-left and
// bottom-right corners of the visible-light (BGR) region that overlaps the
// IR frame.
// NOTE(review): currently referenced only from commented-out code in
// imagehandle(); the active code path takes the rectangle from the command
// line instead — confirm before removing.
std::map<std::string, std::vector<cv::Point>> configmap{
    {"10.0.0.41", std::vector<cv::Point>{cv::Point( 420, 125 ), cv::Point( 1650, 1035 )}},
    {"10.0.0.42", std::vector<cv::Point>{cv::Point( 400, 95 ), cv::Point( 1610, 995 )}},
    {"10.0.0.43", std::vector<cv::Point>{cv::Point( 405, 110 ), cv::Point( 1610, 1010 )}},
    {"10.0.0.44", std::vector<cv::Point>{cv::Point( 685, 355 ), cv::Point( 1330, 825 )}},
    {"10.0.0.91", std::vector<cv::Point>{cv::Point( 340, 120 ), cv::Point( 1560, 1030 )}},
    {"10.0.0.92", std::vector<cv::Point>{cv::Point( 330, 125 ), cv::Point( 1540, 1030 )}},
    {"10.0.0.93", std::vector<cv::Point>{cv::Point( 360, 80 ), cv::Point( 1565, 975 )}},
    {"10.0.0.94", std::vector<cv::Point>{cv::Point( 325, 90 ), cv::Point( 1540, 980 )}},
    {"10.0.0.95", std::vector<cv::Point>{cv::Point( 305, 45 ), cv::Point( 1505, 955 )}},
    {"10.0.0.96", std::vector<cv::Point>{cv::Point( 420, 95 ), cv::Point( 1625, 985 )}},
    {"10.0.1.5", std::vector<cv::Point>{cv::Point( 325, 75 ), cv::Point( 1530, 965 )}},
    {"10.0.1.6", std::vector<cv::Point>{cv::Point( 345, 50 ), cv::Point( 1545, 950 )}},
};

/**
 * @description: Extract the IPv4 address from a stream URL of the form
 *               "...@<ip>:<port>/...", e.g. an RTSP URL with credentials.
 * @param url   stream URL such as "rtsp://user:pass@192.168.1.64:554/..."
 * @return      the dotted-quad IP string, or "" when the URL does not match
 */
std::string get_ip( const std::string& url ) {
    // Capture group 1 is the full dotted-quad address; the inner groups only
    // validate each octet's 0-255 range.  static: compile the regex once
    // instead of on every call (std::regex construction is expensive).
    static const std::regex ip_reg(
        ".*@(((2[0-4]\\d|25[0-5]|[01]?\\d\\d?)\\.){3}(2[0-4]\\d|25[0-5]|[01]?\\d\\d?)):.*" );
    std::smatch matchResult;
    if ( std::regex_match( url, matchResult, ip_reg ) ) {
        // The original looped over every sub-match but always kept
        // matchResult[1]; read group 1 directly.
        return matchResult[1];
    }
    return "";
}

// Log the FFmpeg error string for a negative return code to stderr and pass
// the code through unchanged, so the call can be used inline on any av_* result.
int averror( int ret ) {
    if ( ret >= 0 ) {
        return ret;
    }
    char msg[512];
    av_strerror( ret, msg, sizeof( msg ) - 1 );
    fprintf( stderr, "%s\n", msg );
    return ret;
}

/**
 * @description: Dual-lens camera group: one visible-light (BGR) stream and
 *               one infrared (IR) stream that are opened, read and released
 *               together.
 * @author: xudongxu
 */
class CameraGroup {
  public:
    CameraGroup() = default;

    /**
     * @param urllist exactly two URLs: [0] = BGR stream, [1] = IR stream.
     * @throws std::logic_error if the list size is wrong or a stream fails to open.
     */
    CameraGroup( const std::vector<std::string>& urllist ) {
        if ( urllist.size() != 2 ) {
            throw std::logic_error( "urllist size shoud be 2,first should be "
                                    "bgrurl,second should be irurl." );
        }
        if ( !open( urllist ) ) {
            throw std::logic_error( "open failed!" );
        }
    }

    /// Open both streams; returns true only if both succeed.
    bool open( const std::vector<std::string>& urllist ) {
        if ( urllist.size() != 2 ) {
            return false;
        }
        isopen_bgr = cambgr.open( urllist[0].c_str() );
        isopen_ir = camir.open( urllist[1].c_str() );
        return isopen_bgr && isopen_ir;
    }

    /// Log and report whether both streams are open.
    bool isOpened() {
        std::cerr << "BGR Camera status: " << isopen_bgr << std::endl;
        std::cerr << "IR Camera status: " << isopen_ir << std::endl;
        return isopen_bgr && isopen_ir;
    }

    /// Grab one frame from each stream; true only if both reads succeed.
    bool read() {
        bool re = cambgr.read( frame_bgr );
        re &= camir.read( frame_ir );
        return re;
    }

    void release() {
        cambgr.release();
        camir.release();
    }

    /// Last BGR frame grabbed by read().
    cv::Mat& bgrframe() { return frame_bgr; }

    /// Last IR frame grabbed by read().
    cv::Mat& irframe() { return frame_ir; }

    int get_irwidth() { return camir.get( CAP_PROP_FRAME_WIDTH ); }

    // BUGFIX(review): the original queried the BGR camera here, which
    // disagrees with both the method name and get_irwidth(); query the IR
    // camera.  (Harmless when both streams share one resolution — confirm.)
    int get_irheight() { return camir.get( CAP_PROP_FRAME_HEIGHT ); }

    int get_fps() { return camir.get( CAP_PROP_FPS ); }

  private:
    // Initialized to false so isOpened() is well-defined even before open()
    // is called (the originals were left uninitialized — UB if read early).
    bool isopen_bgr = false;
    bool isopen_ir = false;
    cv::VideoCapture cambgr, camir;
    Mat frame_bgr, frame_ir;
};

void imagehandle( cv::Mat& resimg, cv::Mat& bgrimg, cv::Mat& irimg, std::string ip, cv::Point left_top,
                  cv::Point right_bottom );

// Stop flag written asynchronously from the SIGINT handler.  volatile
// sig_atomic_t is the only object type the C/C++ standards guarantee safe
// to write from a signal handler and read from normal code (a plain bool is
// not); truthiness in `if (stopcmd)` is unchanged for callers.
volatile sig_atomic_t stopcmd = false;

// SIGINT handler: request a clean shutdown of the push loop in main().
// NOTE(review): iostreams are not async-signal-safe; the message is
// best-effort diagnostics kept from the original code.
void my_exithandle( int signo ) {
    std::cerr << "rtmp stream stop..." << std::endl;
    stopcmd = true;
}

/**
 * @description: Entry point.  Opens a BGR+IR RTSP camera pair, fuses every
 *               frame pair into one image, encodes it (codec copied from the
 *               IR input stream) and pushes the result to an RTMP server as FLV.
 * usage: ./rtmp-stream-pusher bgrurl irurl rtmpurl xmin ymin xmax ymax
 * @author: xudongxu
 */
int main( int argc, char* argv[] ) {
    // std::vector<std::string> inUrl{"rtsp://admin:ad53937301@192.168.1.64:554/h264/ch1/main/av_stream",
    //                                "rtsp://admin:ad53937301@192.168.1.64:554/h264/ch33/main/av_stream"};
    // const char* outUrl = "rtmp://192.168.1.167/live/stream";
    signal( SIGINT, my_exithandle ); // Ctrl-C sets stopcmd so the push loop exits

    if ( argc != 8 ) {
        std::cout << "usage: ./rtmp-stream-pusher bgrurl irurl rtmpurl xmin ymin xmax ymax\n";
        return 0;
    }
    // Echo every argument for field diagnostics.
    std::cout << "argv[0]" << argv[0] << std::endl;
    std::cout << "argv[1]" << argv[1] << std::endl;
    std::cout << "argv[2]" << argv[2] << std::endl;
    std::cout << "argv[3]" << argv[3] << std::endl;
    std::cout << "argv[4]" << argv[4] << std::endl; // xmin
    std::cout << "argv[5]" << argv[5] << std::endl; // ymin
    std::cout << "argv[6]" << argv[6] << std::endl; // xmax
    std::cout << "argv[7]" << argv[7] << std::endl; // ymax

    std::vector<std::string> inUrl{argv[1], argv[2]}; // [0] = BGR url, [1] = IR url
    const char* outUrl = argv[3];

    // Crop rectangle inside the BGR frame that maps onto the IR frame.
    cv::Point left_top(atoi(argv[4]),atoi(argv[5]));
    cv::Point right_bottom( atoi( argv[6] ), atoi( argv[7] ) );


    // std::vector<std::string> inUrl{"rtsp://admin:ad53937301@192.168.1.64:554/h264/ch1/main/av_stream",
    //                               "rtsp://admin:ad53937301@192.168.1.64:554/h264/ch33/main/av_stream"};
    // const char* outUrl = "rtmp://192.168.1.214/live/stream";

    // NOTE(review): the two register calls below are deprecated no-ops on
    // FFmpeg >= 4.0 but still required on older releases.
    avcodec_register_all();
    // register all MUX
    av_register_all();
    // register all net protocal
    avformat_network_init();

    // VideoCapture cam;
    CameraGroup cam;
    Mat frame; // fused output frame, filled by imagehandle() each iteration
    // Frame fromat converting context (BGR24 -> YUV420P)
    SwsContext* vsc = NULL;

    // Output data structure--YUV
    AVFrame* yuv = NULL;

    // Encoder context
    AVCodecContext* vc = NULL;

    // rtmp flv MUX
    AVFormatContext* ic = NULL;

    try {
        /////////////////////////////////////////////////////////////////////////////////////
        /// 1. Open Cam
        cam.open( inUrl );
        if ( !cam.isOpened() ) {
            std::cerr << "cam open failed\n";
            return -1;
        }
        cout << "cam open sucess" << endl;
        /// 2. Init SwsContext

        // NOTE(review): width comes from the IR camera but height (via
        // CameraGroup::get_irheight) is read from the BGR camera — confirm
        // both streams share one resolution.
        int inWidth = cam.get_irwidth();
        int inHeight = cam.get_irheight();
        int fps = cam.get_fps();
        if ( fps == 0 ) {
            fps = 25; // fall back to 25 fps when the camera does not report one
        }
        cout << fps << endl;

        vsc = sws_getCachedContext( vsc, inWidth, inHeight, AV_PIX_FMT_BGR24, inWidth, inHeight, AV_PIX_FMT_YUV420P,
                                    SWS_BICUBIC, 0, 0, 0 );
        if ( !vsc ) {
            std::cerr << "sws_getCachedContext failed\n";
        }

        /// 3. Init Output data structure--YUV
        yuv = av_frame_alloc();
        yuv->format = AV_PIX_FMT_YUV420P;
        yuv->width = inWidth;
        yuv->height = inHeight;
        yuv->pts = 0;
        // allocate the space for the data of YUV (32-byte alignment)
        int ret = av_frame_get_buffer( yuv, 32 );
        averror( ret );

//-==========================================================================================================================
// Copy the input stream's codec configuration into the encoder context.
#if ( ENABLE )
        std::cout << " ====>copy codec<==== " << std::endl;
        AVFormatContext* ifmt_ctx = nullptr;
        // Probe the IR stream (inUrl[1]) to discover its codec parameters.
        ret = avformat_open_input( &ifmt_ctx, inUrl[1].c_str(), nullptr, nullptr );
        averror( ret );
        ret = avformat_find_stream_info( ifmt_ctx, NULL );
        averror( ret );

        int videostream_index = av_find_best_stream( ifmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0 );
        // NOTE(review): this reports the previous `ret`, not
        // videostream_index — a negative stream index would go unnoticed
        // and the streams[] access below would be out of bounds.
        averror( ret );
        AVStream* instream = ifmt_ctx->streams[videostream_index];
        // Look up an encoder matching the input stream's codec id.
        AVCodec* codec = avcodec_find_encoder( instream->codecpar->codec_id );
        if ( codec == NULL ) {
            averror( 0 );
        }
        AVCodecParameters* codecpar = avcodec_parameters_alloc();
        ret = avcodec_parameters_copy( codecpar, instream->codecpar ); // copy the stream's codec configuration
        averror( ret );
        codecpar->codec_tag = 0; // let the output muxer choose its own tag
        vc = avcodec_alloc_context3( codec );
        ret = avcodec_parameters_to_context( vc, codecpar );
        vc->time_base = {1, fps}; // used to calculate pts: pts*time_base = second
        averror( ret );
        // NOTE(review): codecpar is never released (avcodec_parameters_free)
        // — a small one-off leak.
        avformat_close_input( &ifmt_ctx );
#endif
        //-==========================================================================================================================

        // d. Open encodder context
        ret = avcodec_open2( vc, 0, 0 );
        averror( ret );
        cout << "avcodec_open2 successed!" << endl;

        /// 5. Config MUX and stream for output
        // a. Create context for MUX
        ret = avformat_alloc_output_context2( &ic, 0, "flv", outUrl );
        averror( ret );
        // b. Add video stream
        AVStream* vs = avformat_new_stream( ic, NULL );
        if ( !vs ) {
            std::cerr << "avformat_new_stream failed\n";
            return -1;
        }
        // vs->codecpar->codec_tag = 0;
        // NOTE(review): this copy is immediately overwritten by
        // avcodec_parameters_from_context below — redundant.
        avcodec_parameters_copy( vs->codecpar, codecpar );

        // copy  parameter from Encoder to MUX
        avcodec_parameters_from_context( vs->codecpar, vc );
        av_dump_format( ic, 0, outUrl, 1 );

        /// 6. Open rtmp output IO
        ret = avio_open( &ic->pb, outUrl, AVIO_FLAG_WRITE );
        averror( ret );
        // write mux header
        ret = avformat_write_header( ic, NULL ); // after this operation the stream's time_base will also be
                                                 // changed, not vc->time_base anymore
        averror( ret );

        AVPacket pack;
        memset( &pack, 0, sizeof( pack ) );
        int vpts = 0; // raw frame counter fed to the encoder as pts
        // int mycounter = 0; // frame counter (was used to drop every other frame)
        for ( ;; ) {
            if ( stopcmd ) // set by the SIGINT handler
                break;
            if ( !cam.read() ) {
                std::cerr << "read error image...." << std::endl;
                continue;
            }
            // if ( mycounter++ % 2 == 0 )
            //    continue;

            // Fuse the BGR and IR frames into `frame`.
            imagehandle( frame, cam.bgrframe(), cam.irframe(), get_ip( std::string( argv[1] ) ) ,left_top,right_bottom);
            /// convert RGB to YUV
            // Input data structure--RGB
            uint8_t* indata[AV_NUM_DATA_POINTERS] = {0}; // srcStride
            indata[0] = frame.data;
            int inlinesize[AV_NUM_DATA_POINTERS] = {0}; // srcSlice
            // bytes per row of the fused frame (cols * channels for 8-bit BGR)
            // inlinesize[0] = inWidth * 3;
            inlinesize[0] = frame.cols * frame.elemSize();

            int h = sws_scale( vsc, indata, inlinesize, 0, frame.rows, yuv->data, yuv->linesize );
            if ( h <= 0 ) {
                continue;
            }

            // cout << h << " " << flush;

            /// Mux YUV to flv h.264
            yuv->pts = vpts;
            vpts++;
            ret = avcodec_send_frame( vc, yuv );
            if ( ret != 0 ) {
                continue;
            }
            ret = avcodec_receive_packet( vc, &pack );
            // NOTE(review): when receive fails (ret != 0) this condition is
            // still true, so the stale/empty packet falls through and is
            // written below — likely meant `ret == 0 && pack.size > 0`.
            if ( ret != 0 || pack.size > 0 ) {
                // cout << '*' <<pack.size<< flush;
            } else {
                continue;
            }

            /// Push stream
            // Rescale timestamps from the encoder time_base to the muxed
            // stream's time_base (rewritten by avformat_write_header).
            pack.pts = av_rescale_q( pack.pts, vc->time_base, vs->time_base );
            pack.dts = av_rescale_q( pack.dts, vc->time_base, vs->time_base );
            ret = av_interleaved_write_frame( ic, &pack );
            if ( ret == 0 ) {
                cout << '#' << flush; // one '#' per packet pushed
            }
        }
        // NOTE(review): on normal loop exit no av_write_trailer / avio_closep
        // / context frees run — cleanup happens only in the catch block.
    } catch ( exception& ex ) {

        // if ( cam.isOpened() )
        //    cam.release();
        if ( vsc ) {
            sws_freeContext( vsc );
            vsc = NULL;
        }
        if ( vc ) {
            // NOTE(review): ic may still be NULL here if the exception was
            // thrown before avformat_alloc_output_context2 succeeded;
            // ic->pb would then dereference a null pointer.
            avio_closep( &ic->pb );
            avcodec_free_context( &vc );
        }
        cerr << ex.what() << endl;
    }
    getchar(); // keep the console open until a key is pressed

    return 0;
}

/**
 * @description: Fuse a visible-light (BGR) frame with an infrared frame:
 *               warp the selected BGR region onto the IR image plane, then
 *               combine the warped BGR luma with the IR chroma.
 * @param bgr         full visible-light frame
 * @param ir          infrared frame (3-channel, BGR-encoded)
 * @param fusion_bgr  output fused BGR image, sized like `ir`
 * @param bgr_pt1     top-left corner of the region in `bgr` mapped to `ir`
 * @param bgr_pt2     bottom-right corner of that region
 * @return 0 on success, -1 on any OpenCV/std exception
 * @author: xudongxu
 */
int fusion( cv::Mat& bgr, cv::Mat& ir, cv::Mat& fusion_bgr, cv::Point bgr_pt1, cv::Point bgr_pt2 ) {
    try {
        const int ir_height = ir.rows;
        const int ir_width = ir.cols;

        // Four corner correspondences: the BGR crop rectangle maps onto the
        // whole IR image (clockwise from top-left).
        std::vector<Point2f> bgr_corners{
            Point2f( bgr_pt1.x, bgr_pt1.y ), Point2f( bgr_pt2.x, bgr_pt1.y ),
            Point2f( bgr_pt2.x, bgr_pt2.y ), Point2f( bgr_pt1.x, bgr_pt2.y )};
        std::vector<Point2f> ir_corners{
            Point2f( 0, 0 ), Point2f( ir_width - 1, 0 ),
            Point2f( ir_width - 1, ir_height - 1 ), Point2f( 0, ir_height - 1 )};

        // NOTE(review): with exactly 4 correspondences RANSAC adds nothing;
        // cv::getPerspectiveTransform would be exact — kept as-is to
        // preserve numerical behavior.
        cv::Mat h = cv::findHomography( bgr_corners, ir_corners, cv::RANSAC, 10 );
        cv::Mat bgr_crop;
        cv::warpPerspective( bgr, bgr_crop, h, cv::Size( ir_width, ir_height ) );

        // Luma from the warped visible-light image...
        cv::Mat y_crop;
        cv::cvtColor( bgr_crop, y_crop, cv::COLOR_BGR2GRAY );
        // ...chroma from the infrared image.
        cv::Mat ir_ycrcb;
        cv::cvtColor( ir, ir_ycrcb, cv::COLOR_BGR2YCrCb );

        cv::resize( y_crop, y_crop, cv::Size( ir_ycrcb.cols, ir_ycrcb.rows ) );

        std::vector<Mat> ir_channels;
        cv::split( ir_ycrcb, ir_channels ); // split into Y, Cr, Cb planes

        // Recombine: visible-light Y plus infrared Cr/Cb, then back to BGR.
        Mat new_channels[3] = {y_crop, ir_channels[1], ir_channels[2]};
        Mat merged_image;
        cv::merge( new_channels, 3, merged_image );
        cv::cvtColor( merged_image, fusion_bgr, cv::COLOR_YCrCb2BGR );

        // Temporary debug overlay: hard-coded purple rectangle.
        // TODO(review): remove or make configurable.
        Point p1( 700, 30 );
        Point p2( 784, 118 );
        rectangle( fusion_bgr, p1, p2, Scalar( 160, 32, 240 ), 2, 8, 0 );
        return 0;
    } catch ( const cv::Exception& e ) {
        std::cerr << e.what() << '\n';
        return -1;
    } catch ( const std::exception& e ) {
        std::cerr << e.what() << '\n';
        return -1;
    } catch ( ... ) {
        return -1;
    }
}

/**
 * @description: Per-frame processing callback: logs frame dimensions, then
 *               fuses the BGR and IR frames into `resimg` using the crop
 *               rectangle supplied on the command line.
 * @author: xudongxu
 */
void imagehandle( cv::Mat& resimg, cv::Mat& bgrimg, cv::Mat& irimg, std::string ip,cv::Point left_top,cv::Point right_bottom ) {
    const long stamp = (long)time( NULL );
    std::cout << std::to_string( stamp ) << " fusion:" << std::endl;
    std::cout << "bgr frame width = " << bgrimg.cols << ",height= " << bgrimg.rows << std::endl;
    std::cout << "ir frame width = " << irimg.cols << ",height= " << irimg.rows << std::endl;

    fusion( bgrimg, irimg, resimg, left_top, right_bottom );

    // `ip` fed an earlier per-camera lookup into configmap; the command-line
    // rectangle now supersedes it.  Previous fallback kept for reference:
    //if ( configmap.find( ip ) != configmap.end() ) {
    //    fusion( bgrimg, irimg, resimg, configmap[ip][0], configmap[ip][1] );
    //} else {
    //    cv::Point bgr_pt1( 219, 50 );
    //    cv::Point bgr_pt2( 1449, 921 );
    //    fusion( bgrimg, irimg, resimg, bgr_pt1, bgr_pt2 );
    //}
}