#include <iostream>
#include <librealsense2/rs.hpp>
#include <opencv2/opencv.hpp>
#include <opencv2/imgproc.hpp>

extern "C"
{
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <libavutil/time.h>
}

#include "videoPush.h"
#include "test.h"

// const static int WIDTH = 1920;
// const static int HEIGHT = 1080;
// const static int FPS = 30;

void realsense2Init()
{
    // realsense获取视频流
    rs2::pipeline pipe;
    rs2::config cfg;
    // 配置彩色流
    cfg.enable_stream(RS2_STREAM_COLOR, WIDTH, HEIGHT, RS2_FORMAT_BGR8, FPS);
    pipe.start(cfg);
    pipe.stop();
}

// ffmpeg摄像头取流并显示
void fun1()
{
    /* =================创建输入================== */
    // 注册所有设备
    avdevice_register_all();
    // 获取输入格式
    auto input_fmt = av_find_input_format("video4linux2");
    // 打开输入设备
    AVFormatContext *input_ctx = nullptr;
    avformat_open_input(&input_ctx, "/dev/video4", input_fmt, nullptr);
    // 获取输入流信息
    avformat_find_stream_info(input_ctx, nullptr);
    // 寻找视频流索引
    int video_stream_index = -1;
    for (int i = 0; i < input_ctx->nb_streams; i++)
    {
        if (input_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            video_stream_index = i;
            break;
        }
    }
    // 获取编解码器上下文
    auto codecContext = avcodec_alloc_context3(nullptr);
    // 设置编解码器上下文参数
    avcodec_parameters_to_context(codecContext, input_ctx->streams[video_stream_index]->codecpar);
    // 查找解码器
    auto codec = avcodec_find_decoder(codecContext->codec_id);
    // 打开解码器
    avcodec_open2(codecContext, codec, nullptr);

    /* =================解码显示================== */
    // 分配AVPacket结构体
    AVPacket *pkt = av_packet_alloc();
    // 初始化SwsContext结构体  yuyv422转RGB
    SwsContext *sws_ctx = sws_getContext(WIDTH, HEIGHT, AV_PIX_FMT_YUYV422, WIDTH, HEIGHT, AV_PIX_FMT_BGR24, SWS_BILINEAR, NULL, NULL, NULL);

    // 持续读取数据并放入pkt
    while (av_read_frame(input_ctx, pkt) >= 0)
    {
        if (pkt->stream_index != video_stream_index)
            continue;

        // 分配AVFrame结构体
        AVFrame *frame = av_frame_alloc();
        // 解码图像帧
        int ret = avcodec_send_packet(codecContext, pkt);

        // 读取解码后的图像帧
        while (ret >= 0)
        {
            ret = avcodec_receive_frame(codecContext, frame);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF || ret < 0)
                break;

            // AV_PIX_FMT_YUYV422转RGB
            AVFrame *rgb_frame = av_frame_alloc();
            rgb_frame->format = AV_PIX_FMT_BGR24;
            rgb_frame->width = frame->width;
            rgb_frame->height = frame->height;
            av_frame_get_buffer(rgb_frame, 0);

            sws_scale(sws_ctx, frame->data, frame->linesize, 0, frame->height, rgb_frame->data, rgb_frame->linesize);

            // 显示图像帧
            cv::Mat mat(rgb_frame->height, rgb_frame->width, CV_8UC3, rgb_frame->data[0]);
            cv::imshow("video4", mat);
            if (cv::waitKey(1) == 'q')
                return;
            av_frame_free(&rgb_frame);
        }

        av_frame_free(&frame);
        av_packet_unref(pkt);
    }

    sws_freeContext(sws_ctx);
    av_packet_free(&pkt);
    avformat_close_input(&input_ctx);
    avcodec_free_context(&codecContext);
    avformat_network_deinit();
    cv::destroyAllWindows();
}

// ffmpeg摄像头取流并推送
void fun2()
{
    avdevice_register_all();
    // 获取输入流
    auto input_fmt = av_find_input_format("video4linux2");
    AVFormatContext *input_ctx = nullptr;
    avformat_open_input(&input_ctx, "/dev/video4", input_fmt, nullptr);
    avformat_find_stream_info(input_ctx, nullptr);
    int video_stream_index = -1;
    for (int i = 0; i < input_ctx->nb_streams; i++)
    {
        if (input_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            video_stream_index = i;
            break;
        }
    }
    // 获取输入编解码器
    auto input_codec_ctx = avcodec_alloc_context3(nullptr);
    avcodec_parameters_to_context(input_codec_ctx, input_ctx->streams[video_stream_index]->codecpar);
    auto input_codec = avcodec_find_decoder(input_codec_ctx->codec_id);
    avcodec_open2(input_codec_ctx, input_codec, nullptr);

    avformat_network_init();
    // 获取输出流
    AVFormatContext *output_ctx = nullptr;
    avformat_alloc_output_context2(&output_ctx, nullptr, "rtsp", "rtsp://192.168.0.60:8554/stream");
    // 获取输出编解码器
    // auto output_codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    auto output_codec = avcodec_find_encoder_by_name("x264");

    auto output_codec_ctx = avcodec_alloc_context3(output_codec);
    output_codec_ctx->width = WIDTH;
    output_codec_ctx->height = HEIGHT;
    output_codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
    output_codec_ctx->time_base = {1, FPS};
    output_codec_ctx->profile = FF_PROFILE_H264_CONSTRAINED_BASELINE;
    // output_codec_ctx->max_b_frames = 0; // B帧为0

    AVDictionary *param = nullptr;
    av_dict_set(&param, "preset", "ultrafast", 0);
    av_dict_set(&param, "tune", "zerolatency", 0);
    avcodec_open2(output_codec_ctx, output_codec, &param);
    av_dict_free(&param);

    // 赋值输出编码器参数到输出流
    auto output_stream = avformat_new_stream(output_ctx, nullptr);
    avcodec_parameters_from_context(output_stream->codecpar, output_codec_ctx);
    output_stream->time_base = {1, FPS};
    output_stream->avg_frame_rate = {1, FPS};

    // 格式转换
    SwsContext *swsContext = sws_getContext(WIDTH, HEIGHT, AV_PIX_FMT_YUYV422, WIDTH, HEIGHT, AV_PIX_FMT_YUV420P, SWS_BILINEAR, nullptr, nullptr, nullptr);
    AVPacket *pkt = av_packet_alloc();
    AVPacket *output_pkt = av_packet_alloc();

    AVFrame *frame = av_frame_alloc();
    AVFrame *output_frame = av_frame_alloc();
    output_frame->format = AV_PIX_FMT_YUV420P;
    output_frame->width = WIDTH;
    output_frame->height = HEIGHT;
    av_frame_get_buffer(output_frame, 32);

    auto start_time = av_gettime();

    av_dump_format(output_ctx, 0, "rtsp://192.168.0.60:8554/stream", 1);
    av_dump_format(input_ctx, 0, "/dev/video4", 0);

    avio_open(&output_ctx->pb, "rtsp://192.168.0.60:8554/stream", AVIO_FLAG_WRITE);
    // 写入输出头文件
    avformat_write_header(output_ctx, nullptr);

    int ret;
    while (true)
    {
        if (av_read_frame(input_ctx, pkt) >= 0 && pkt->stream_index == video_stream_index)
        {
            ret = avcodec_send_packet(input_codec_ctx, pkt);
            if (ret < 0)
                break;
            ret = avcodec_receive_frame(input_codec_ctx, frame);
            if (ret != 0)
                continue;
            sws_scale(swsContext, frame->data, frame->linesize, 0, frame->height, output_frame->data, output_frame->linesize);
            //  传递时间戳
            // output_frame->pts = av_rescale_q(frame->pts, input_codec_ctx->time_base, output_codec_ctx->time_base);
            ret = avcodec_send_frame(output_codec_ctx, output_frame);
            if (ret < 0)
                break;
            ret = avcodec_receive_packet(output_codec_ctx, output_pkt);
            if (ret != 0)
                continue;

            // 计算输出时间戳
            output_pkt->dts = av_rescale_q(pkt->dts, input_ctx->streams[video_stream_index]->time_base, output_stream->time_base);
            output_pkt->pts = output_pkt->dts;

            av_interleaved_write_frame(output_ctx, output_pkt);

            av_packet_unref(pkt);
            av_packet_unref(output_pkt);
        }
    }
    av_frame_free(&output_frame);
    av_frame_free(&frame);
    av_packet_free(&pkt);
    av_packet_free(&output_pkt);

    sws_freeContext(swsContext);

    av_write_trailer(output_ctx);
    avformat_close_input(&input_ctx);
    avformat_free_context(output_ctx);

    avcodec_free_context(&input_codec_ctx);
    avcodec_free_context(&output_codec_ctx);
    avio_close(output_ctx->pb);
    avformat_network_deinit();
}

// Capture from RealSense and push the stream — not yet implemented.
void fun3()
{
    // TODO: grab colour frames via the rs2 pipeline and feed them to an
    // FFmpeg H.264 encoder / RTSP muxer, mirroring fun2().
}

// Entry point: warm up (and immediately release) the RealSense pipeline,
// then run the FFmpeg capture-and-push loop. Command-line args are unused.
int main(int argc, char const *argv[])
{
    realsense2Init();
    // Alternative path using the Video class from videoPush.h (disabled):
    // Video v;
    // v.init();
    // v.start();
    // while (v.getIsRunning());
    fun2();
    // run();
    return 0;
}
