//
// Created by xiexiangping on 23-04-17
//
#include "videoCollectionEncode.h"

// Opens the dshow capture device, prepares the raw-video decoder and an
// H.264 encoder (members fmtCtx / pCodecCtx / encoderCtx).
// Returns true on success, false (after printing a message) on any failure.
bool videoCollectionEncode::initStream() {

    device = "video=Integrated Camera";

    fmtCtx = avformat_alloc_context();
    const AVInputFormat* inputFormat = av_find_input_format("dshow");

    if (avformat_open_input(&fmtCtx, device.c_str(), inputFormat, NULL) != 0) {
        printf("打开设备失败\n");
        return false;
    } else {
        printf("成功打开设备\n");
    }

    if (avformat_find_stream_info(fmtCtx, NULL) < 0) {
        printf("fail\n");
        return false;
    } else {
        printf("成功获取音频流\n");
    }


    //av_dump_format(vFmtCtx, 0, "video device", 0);    // dump input info

    // BUGFIX: the old loop also matched AUDIO streams and always recorded
    // index 0 instead of i. Locate the first real video stream and remember
    // its actual index. (unsigned i: nb_streams is unsigned.)
    int videoStreamIndex = -1;

    for (unsigned int i = 0; i < fmtCtx->nb_streams; ++i) {
        if (fmtCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = (int)i;
            break;
        }
    }

    if (videoStreamIndex < 0) {
        printf("无输入\n");
        return false;
    }
    // NOTE(review): read() assumes the video stream is index 0. For a dshow
    // "video=..." device it always is; if other inputs are ever used, the
    // index found here must be shared with read().

    const AVCodec* codec = avcodec_find_decoder(fmtCtx->streams[videoStreamIndex]->codecpar->codec_id);

    // Find the decoder for the captured stream's codec id
    if (!codec) {
        printf("Cannot find decoder!\n");
        return false;
    }
    else {
        printf("找到解码器\n");
    }

    pCodecCtx = avcodec_alloc_context3(codec);
    // Copy the stream parameters (width/height/pix_fmt...) into the decoder context
    if (avcodec_parameters_to_context(pCodecCtx, fmtCtx->streams[videoStreamIndex]->codecpar) < 0) {
        printf("将AVCodecParameters的值复制到AVCodecContext中错误!\n");
        return false;
    }
    else {
        printf("已将fmtCtx->streams[0]->codecpar的值复制到pVideoCodecCtx中\n");
    }


    if (avcodec_open2(pCodecCtx, codec, NULL)) {
        printf("打开解码器失败\n");
        return false;
    }
    else {
        printf("打开解码器成功\n");
    }

    // H.264 video encoder
    const AVCodec* encoder = avcodec_find_encoder(AV_CODEC_ID_H264);
    encoderCtx = avcodec_alloc_context3(encoder);
    encoderCtx->width = 1280;
    encoderCtx->height = 720;
    encoderCtx->time_base = {1, 25}; // 25 fps
    encoderCtx->framerate = {25, 1};
    encoderCtx->bit_rate = 400000; // 400 kbps (the old comment wrongly said 4Mbps)
    encoderCtx->gop_size = 10; // keyframe interval: 10 frames
    encoderCtx->max_b_frames = 3;
    encoderCtx->pix_fmt = AV_PIX_FMT_YUV420P; // encoder input pixel format
    //av_opt_set(encoderCtx->priv_data, "preset", "ultrafast", 0); // x264 preset

    if (avcodec_open2(encoderCtx, encoder, NULL) < 0) {
        printf("Could not open codec\n");
        return  false;
    }

    return true;
}

void videoCollectionEncode::read() {
    int ret;
    AVPacket* pPacket = av_packet_alloc();
    AVFrame* pFrame = av_frame_alloc();
    while(true) {

        if (av_read_frame(fmtCtx, pPacket) < 0) { // 读取音视频数据包
            printf("读取到结尾处\n");
            break;
        }

        //视频流
        if (pPacket->stream_index == 0) {
            //printf("采集到视频数据\n");

            ret = avcodec_send_packet(pCodecCtx, pPacket);

            //std::cout << "send:" << ret << std::endl;

            if (ret < 0) {
                printf("错误：将音频数据包送入解码器解码失败\n");
                break;
            }

            while (ret >= 0) {
                ret = avcodec_receive_frame(pCodecCtx, pFrame);
                //                if(type == AVMEDIA_TYPE_AUDIO)
                //                    std::cout << "receive:" << ret << std::endl;
                if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                    break;
                }
                else if (ret < 0) {
                    printf("错误：获取音频解码帧失败\n");
                    break;
                }

//AVPixelFormat
//std::cout << "pFrame format:" << pFrame->format << std::endl;
//std::cout << "pFrame:" << pFrame->width << "," << pFrame->height << std::endl;

#if DEBUG_VIDEO
                cv::Mat cv_mat;
                cv_mat = frameOperate::avframe_to_cvmat_yuyv(pFrame);     //将AVFrame转换为Mat
                cv::imshow("video", cv_mat);
                cv::waitKey(1);
#endif
    //编码
                AVFrame* dst = convert_yuyv422_to_yuv420p(pFrame);
                AVPacket *pkt;
                pkt = encodeVideoFrame(encoderCtx, dst);

                if(pkt == NULL || pkt->size == 0)
                    break;

//                std::cout << "size : " << pkt->size << std::endl;
//                std::cout << "flags : " << pkt->flags << std::endl;

                av_packet_unref(pkt);
            }
        }


//                calFrameFps();
//                printf("video fps = %f\n", frameFps);

        av_packet_unref(pPacket);
    }

    avcodec_free_context(&pCodecCtx);
    avformat_close_input(&fmtCtx);
}


// Converts a raw camera frame to YUV420P for the H.264 encoder.
// Returns a newly allocated frame (the CALLER must av_frame_free it),
// or NULL on any failure.
AVFrame* videoCollectionEncode::convert_yuyv422_to_yuv420p(AVFrame* src_frame) {
        // Guard against a NULL input (e.g. an upstream allocation failure)
        if (!src_frame) {
            return NULL;
        }

        // Allocate a new frame to hold the converted data
        AVFrame* dst_frame = av_frame_alloc();
        if (!dst_frame) {
            return NULL;
        }

        // Destination: same dimensions, YUV420P pixel format
        dst_frame->format = AV_PIX_FMT_YUV420P;
        dst_frame->width = src_frame->width;
        dst_frame->height = src_frame->height;

        // Allocate the destination frame's data buffers
        int ret = av_frame_get_buffer(dst_frame, 0);
        if (ret < 0) {
            av_frame_free(&dst_frame);
            return NULL;
        }

        // GENERALIZED: use the frame's actual pixel format rather than
        // hard-coding AV_PIX_FMT_YUYV422 — identical behavior for YUYV422
        // input, correct for any other raw capture format.
        SwsContext* sws_ctx = sws_getContext(src_frame->width, src_frame->height,
                                             (AVPixelFormat)src_frame->format,
                                             dst_frame->width, dst_frame->height, AV_PIX_FMT_YUV420P,
                                             SWS_BILINEAR, NULL, NULL, NULL);
        if (!sws_ctx) {
            av_frame_free(&dst_frame);
            return NULL;
        }

        // Perform the pixel-format conversion
        sws_scale(sws_ctx, src_frame->data, src_frame->linesize, 0, src_frame->height,
                  dst_frame->data, dst_frame->linesize);

        // Keep the timestamp so the encoder can emit monotonic pts
        // (the old code dropped it, leaving dst pts unset).
        dst_frame->pts = src_frame->pts;

        // Free the sws_context
        sws_freeContext(sws_ctx);

        return dst_frame;
}


// Sends one frame into the encoder and tries to receive one encoded packet.
// Returns the H.264 packet (CALLER owns it, must av_packet_free) or NULL
// when the encoder needs more input or an error occurred.
AVPacket *videoCollectionEncode::encodeVideoFrame(AVCodecContext *codecCtx, AVFrame *frame) {
        AVPacket *pkt = av_packet_alloc();

        if (!pkt) {
            printf("could not allocate the packet\n");
            return NULL;
        }

        int ret = avcodec_send_frame(codecCtx, frame);
        if (ret < 0) {
            // BUGFIX: pkt was leaked on every NULL-return path below
            av_packet_free(&pkt);
            return NULL;
        }

        ret = avcodec_receive_packet(codecCtx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            // Encoder is buffering (B-frames / lookahead) — feed more frames
            av_packet_free(&pkt);
            return NULL;
        }
        if (ret < 0) {
            // Genuine encode failure
            av_packet_free(&pkt);
            return NULL;
        }

        // Success: pkt now holds one encoded H.264 packet
        return pkt;
}

std::vector<std::string> baseCollectionEncode::enumAudioVideoDevices() {
        std::vector<std::string> devices;
        CoInitialize(NULL);
        ICreateDevEnum* pCreateDevEnum = NULL;
        HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
                                      IID_ICreateDevEnum, (void**)&pCreateDevEnum);
        if (SUCCEEDED(hr)) {
            IEnumMoniker* pEnumMoniker = NULL;
            hr = pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0);
            if (SUCCEEDED(hr)) {
                IMoniker* pMoniker = NULL;
                ULONG cFetched = 0;
                while (pEnumMoniker->Next(1, &pMoniker, &cFetched) == S_OK) {
                IPropertyBag* pPropertyBag = NULL;
                hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pPropertyBag);
                if (SUCCEEDED(hr)) {
                    VARIANT var;
                    VariantInit(&var);
                    hr = pPropertyBag->Read(L"FriendlyName", &var, 0);
                    if (SUCCEEDED(hr)) {
                        std::wstring ws(var.bstrVal, SysStringLen(var.bstrVal));
                        std::string deviceName(ws.begin(), ws.end());
                        devices.push_back(deviceName);
                        VariantClear(&var);
                    }
                    pPropertyBag->Release();
                }
                pMoniker->Release();
                }
                pEnumMoniker->Release();
            }
            hr = pCreateDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pEnumMoniker, 0);
            if (SUCCEEDED(hr)) {
                IMoniker* pMoniker = NULL;
                ULONG cFetched = 0;
                while (pEnumMoniker->Next(1, &pMoniker, &cFetched) == S_OK) {
                IPropertyBag* pPropertyBag = NULL;
                hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pPropertyBag);
                if (SUCCEEDED(hr)) {
                    VARIANT var;
                    VariantInit(&var);
                    hr = pPropertyBag->Read(L"FriendlyName", &var, 0);
                    if (SUCCEEDED(hr)) {
                        char name[256];
                        //std::wstring ws(var.bstrVal, SysStringLen(var.bstrVal));
                        //std::string deviceName(ws.begin(), ws.end());
                        WideCharToMultiByte(CP_ACP, 0, var.bstrVal, -1, name, 256, NULL, NULL);
                        devices.push_back(std::string(name));
                        VariantClear(&var);
                    }
                    pPropertyBag->Release();
                }
                pMoniker->Release();
                }
                pEnumMoniker->Release();
            }
            pCreateDevEnum->Release();
        }
        CoUninitialize();
        return devices;
}
