#include <string>
#include <memory>
#include <iostream>
#include <thread>

using namespace std;

#include "ffmpeg.h"

// Parameter holder for sws_getContext(): source/destination geometry plus
// input/output pixel formats. Pure data, no behavior of its own.
class SwsScaleContext
{
public:
    SwsScaleContext() {}

    // Set the resolution of the frames fed INTO the scaler.
    void SetSrcResolution(int width, int height)
    {
        srcWidth = width;
        srcHeight = height;
    }

    // Set the resolution of the frames produced BY the scaler.
    void SetDstResolution(int width, int height)
    {
        dstWidth = width;
        dstHeight = height;
    }

    // Set input (iformat) and output (oformat) pixel formats.
    void SetFormat(AVPixelFormat iformat, AVPixelFormat oformat)
    {
        this->iformat = iformat;
        this->oformat = oformat;
    }

    // Members stay public: initSwsContext() reads them directly.
    // In-class initializers fix the previously uninitialized fields,
    // which were undefined behavior if read before the setters ran.
    int srcWidth = 0;
    int srcHeight = 0;
    int dstWidth = 0;
    int dstHeight = 0;
    AVPixelFormat iformat = AV_PIX_FMT_NONE;
    AVPixelFormat oformat = AV_PIX_FMT_NONE;
};

// --- Global pipeline state ------------------------------------------------
// Owned here; created by OpenInput/initEncoderCodec/OpenOutput and released
// by CloseInput/CloseOuput.
AVFormatContext *inputformatCtx = nullptr; // dshow capture demuxer
AVCodecContext *encodeContext = nullptr;   // H.264 encoder
AVFormatContext *outputContext = nullptr;  // mp4 muxer (explicit nullptr for consistency)
int64_t lastReadPackTime = 0;              // last av_read_frame time, read by interrupt_cb
int64_t packCnt = 0;                       // packets written so far; drives synthesized PTS

struct SwsContext *pSwsContext = nullptr;  // pixel-format/scale converter
uint8_t *pSwpBuffer = nullptr;             // backing buffer for the scaled frame

// Interrupt callback installed on the input AVFormatContext.
// FFmpeg polls this during blocking I/O; a non-zero return aborts the
// pending operation. We abort when no packet has been read for 3 seconds.
static int interrupt_cb(void *ctx)
{
    (void)ctx; // opaque pointer unused; timing state lives in globals
    const int64_t timeoutUs = 3 * 1000 * 1000; // 3 s, av_gettime() is in microseconds
    if (av_gettime() - lastReadPackTime > timeoutUs)
    {
        // AVIOInterruptCB documents "return 1 to abort"; the original -1
        // only worked because any non-zero value aborts.
        return 1;
    }

    return 0; // keep waiting
}

int OpenInput(const char *inputUrl)
{
    inputformatCtx = avformat_alloc_context();
    lastReadPackTime = av_gettime();
    inputformatCtx->interrupt_callback.callback = interrupt_cb;
    AVInputFormat *inputFmt = av_find_input_format("dshow");
    AVDictionary *options = nullptr;
    av_dict_set_int(&options, "rtbufsize", 18432000, 0);
    int ret = avformat_open_input(&inputformatCtx, inputUrl, inputFmt, &options);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_ERROR, "avformat_open_input failed \n");
        return ret;
    }

    ret = avformat_find_stream_info(inputformatCtx, nullptr);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_ERROR, "avformat_find_stream_info failed \n");
    }
    else
    {
        av_log(NULL, AV_LOG_ERROR, "avformat_find_stream_info success[%s] \n", inputUrl);
    }

    return ret;
}

/**
 * @brief Read one packet from the capture device.
 *
 * Updates lastReadPackTime so interrupt_cb's stall detection stays armed.
 *
 * @return a self-freeing packet, or nullptr when the read failed/timed out
 */
shared_ptr<AVPacket> ReadPacketFromSource()
{
    // Fix: the original paired av_malloc + av_init_packet with a deleter
    // calling av_packet_free AND av_freep — two mismatched allocation
    // schemes. av_packet_alloc/av_packet_free manage both the struct and
    // any referenced payload correctly.
    shared_ptr<AVPacket> packet(av_packet_alloc(), [](AVPacket *p) {
        cout << "free " << __FUNCTION__ << endl;
        av_packet_free(&p);
    });
    if (!packet)
    {
        return nullptr;
    }
    lastReadPackTime = av_gettime();
    int ret = av_read_frame(inputformatCtx, packet.get());
    if (ret >= 0)
    {
        return packet;
    }
    else
    {
        // shared_ptr deleter releases the unused packet.
        return nullptr;
    }
}

int OpenOutput(const char *outUrl, AVCodecContext *encodeCtx)
{
    int ret = avformat_alloc_output_context2(&outputContext, nullptr, "mp4", outUrl);
    ret = avio_open2(&outputContext->pb, outUrl, AVIO_FLAG_WRITE, nullptr, nullptr);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_ERROR, "avio_open2 failed \n");
        goto Error;
    }

    for (size_t i = 0; i < inputformatCtx->nb_streams; i++)
    {
        if (inputformatCtx->streams[i]->codec->codec_type == AVMediaType::AVMEDIA_TYPE_AUDIO)
        {
            continue;
        }

        AVStream *stream = avformat_new_stream(outputContext, encodeCtx->codec);
        ret = avcodec_copy_context(stream->codec, encodeContext);
        if (ret < 0)
        {
            av_log(NULL, AV_LOG_ERROR, "avcodec_copy_context failed\n");
            goto Error;
        }
    }

    ret = avformat_write_header(outputContext, nullptr);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_ERROR, "avformat_write_header failed");
        goto Error;
    }

    av_log(NULL, AV_LOG_FATAL, "Open output file success [%s]\n", outUrl);
    return ret;

Error:
    if (outputContext)
    {
        for (size_t i = 0; i < outputContext->nb_streams; i++)
        {
            avcodec_close(outputContext->streams[i]->codec);
        }

        avformat_close_input(&outputContext);
    }
    return ret;
}

// One-time FFmpeg global setup; call before any other FFmpeg API.
void Init()
{
    av_register_all();              // register muxers/demuxers/codecs (pre-4.0 API)
    avdevice_register_all();        // register capture devices such as dshow
    av_log_set_level(AV_LOG_ERROR); // silence everything below ERROR
}

void CloseInput()
{
    if (inputformatCtx != nullptr)
    {
        avformat_close_input(&inputformatCtx);
    }

    if (pSwsContext != nullptr)
    {
        sws_freeContext(pSwsContext);
    }
}

void CloseOuput()
{
    if (outputContext != nullptr)
    {
        av_write_trailer(outputContext);
        avformat_close_input(&outputContext);
    }
}

int WritePacket(shared_ptr<AVPacket> packet)
{
    packet->pts = packet->dts = packCnt * (outputContext->streams[0]->time_base.den) / outputContext->streams[0]->time_base.num / 30;
    packCnt++;
    return av_interleaved_write_frame(outputContext, packet.get());
}

/**
 * @brief Open a decoder for the given input stream.
 *
 * The stream's codec context (pre-4.0 AVStream::codec) already carries the
 * parameters filled in by avformat_find_stream_info; we only look up the
 * matching decoder and open it.
 *
 * @param inputStream stream whose embedded codec context will be opened
 * @return 0 on success, negative AVERROR, or -1 when no decoder exists
 */
int InitDecodeContext(AVStream *inputStream)
{
    AVCodec *decoder = avcodec_find_decoder(inputStream->codec->codec_id);
    if (decoder == nullptr)
    {
        return -1;
    }

    return avcodec_open2(inputStream->codec, decoder, nullptr);
}

/**
 * @brief Allocate and open an H.264 encoder configured from the input stream.
 *
 * @param inputStream input stream whose decoder parameters (size, time base)
 *                    seed the encoder configuration
 * @param encodecCtx  [out] receives the newly allocated, opened encoder context
 * @return 1 on success, a negative value on failure
 */
int initEncoderCodec(AVStream *inputStream, AVCodecContext **encodecCtx)
{
    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (codec == nullptr) // fix: was dereferenced unchecked below
    {
        av_log(NULL, AV_LOG_FATAL, "H.264 encoder not found\n");
        return -1;
    }

    *encodecCtx = avcodec_alloc_context3(codec);
    if (*encodecCtx == nullptr) // fix: allocation result was never checked
    {
        av_log(NULL, AV_LOG_FATAL, "avcodec_alloc_context3 failed\n");
        return -1;
    }

    AVCodecContext *ctx = *encodecCtx;
    ctx->codec_id = codec->id;
    ctx->has_b_frames = 0; // no B-frames, so DTS can simply mirror PTS
    ctx->time_base.num = inputStream->codec->time_base.num;
    ctx->time_base.den = inputStream->codec->time_base.den;
    ctx->pix_fmt = *codec->pix_fmts; // encoder's first supported format (YUV420P for H.264)
    ctx->width = inputStream->codec->width;
    ctx->height = inputStream->codec->height;
    ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; // mp4 muxer wants extradata, not in-band headers
    int ret = avcodec_open2(ctx, codec, nullptr);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_FATAL, "avcodec_open2 failed\n");
        return ret;
    }

    return 1;
}

/**
 * @brief Decode one packet into `frame` via the stream's codec context.
 *
 * @return true only when a complete frame was produced
 */
bool Decode(AVStream *inputStream, AVPacket *packet, AVFrame *frame)
{
    int frameReady = 0;
    int status = avcodec_decode_video2(inputStream->codec, frame, &frameReady, packet);
    // A non-negative status with frameReady set means `frame` holds a picture.
    return status >= 0 && frameReady != 0;
}

/**
 * @brief Encode one frame; may return nullptr while the encoder buffers.
 *
 * @param encodeCtx opened H.264 encoder
 * @param frame     frame in the encoder's pixel format and resolution
 * @return a self-freeing packet, or nullptr when no output was produced
 */
shared_ptr<AVPacket> Encode(AVCodecContext *encodeCtx, AVFrame *frame)
{
    // Fix: same defect as ReadPacketFromSource — the av_malloc'd packet was
    // released through av_packet_free + av_freep, mixing allocation schemes.
    shared_ptr<AVPacket> pkt(av_packet_alloc(), [](AVPacket *p) {
        cout << "free " << __FUNCTION__ << endl;
        av_packet_free(&p);
    });
    if (!pkt)
    {
        return nullptr;
    }
    pkt->data = NULL; // let the encoder allocate the payload
    pkt->size = 0;

    int gotOutput = 0;
    int ret = avcodec_encode_video2(encodeCtx, pkt.get(), frame, &gotOutput);
    if (ret >= 0 && gotOutput)
    {
        return pkt;
    }
    return nullptr; // deleter releases the empty packet
}

/**
 * @brief Create a swscale context from the collected scale parameters.
 *
 * @param pSwsCxt        [out] receives the new context
 * @param swsScaleContext source/destination geometry and pixel formats
 * @return 0 on success, -1 on failure
 */
int initSwsContext(struct SwsContext **pSwsCxt, SwsScaleContext *swsScaleContext)
{
    *pSwsCxt = sws_getContext(swsScaleContext->srcWidth, swsScaleContext->srcHeight, swsScaleContext->iformat,
                              swsScaleContext->dstWidth, swsScaleContext->dstHeight, swsScaleContext->oformat,
                              SWS_BICUBIC, NULL, NULL, NULL);
    // Fix: the original tested the GLOBAL pSwsContext instead of the context
    // just created through the out-parameter, so a failed sws_getContext was
    // never detected.
    if (*pSwsCxt == NULL)
    {
        return -1;
    }

    return 0;
}

/**
 * @brief Attach a freshly allocated pixel buffer (encoder's pix_fmt) to a frame.
 *
 * The buffer is stored in the global pSwpBuffer and must outlive the frame;
 * it is released in CloseInput().
 *
 * @param[out] pSwsFrame frame to receive the buffer and geometry
 * @param iWidth         frame width in pixels
 * @param iHeight        frame height in pixels
 * @return 1 on success, a negative AVERROR on failure
 */
int initSwsFrame(AVFrame *pSwsFrame, int iWidth, int iHeight)
{
    int numBytes = av_image_get_buffer_size(encodeContext->pix_fmt, iWidth, iHeight, 1);
    if (numBytes < 0) // invalid format/size combination
    {
        return numBytes;
    }

    if (pSwpBuffer != nullptr)
    {
        av_freep(&pSwpBuffer); // fix leak: repeated calls dropped the old buffer
    }
    pSwpBuffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    if (pSwpBuffer == nullptr) // fix: allocation result was never checked
    {
        return AVERROR(ENOMEM);
    }

    av_image_fill_arrays(pSwsFrame->data, pSwsFrame->linesize, pSwpBuffer, encodeContext->pix_fmt, iWidth, iHeight, 1);
    pSwsFrame->width = iWidth;
    pSwsFrame->height = iHeight;
    pSwsFrame->format = encodeContext->pix_fmt;
    return 1;
}

int main(void)
{
    SwsScaleContext swsScaleCxt;
    AVFrame *videoFrame = av_frame_alloc();
    AVFrame *pSwsVideoFrame = av_frame_alloc();
    int64_t startTime = av_gettime();

    Init();
    int ret = OpenInput("video=front Camera");
    if (ret < 0)
    {
        goto Error;
    }
    InitDecodeContext(inputformatCtx->streams[0]);

    ret = initEncoderCodec(inputformatCtx->streams[0], &encodeContext);
    if (ret >= 0)
    {
        ret = OpenOutput("D:\\usb.mp4", encodeContext);
    }

    if (ret < 0)
    {
        goto Error;
    }

    swsScaleCxt.SetSrcResolution(inputformatCtx->streams[0]->codec->width, inputformatCtx->streams[0]->codec->height);
    swsScaleCxt.SetDstResolution(encodeContext->width, encodeContext->height);
    swsScaleCxt.SetFormat(inputformatCtx->streams[0]->codec->pix_fmt, encodeContext->pix_fmt);
    initSwsContext(&pSwsContext, &swsScaleCxt);                                //创建一个 SwsContext
    initSwsFrame(pSwsVideoFrame, encodeContext->width, encodeContext->height); //创建一个新的AVFrame 用于存放sws 后的 videoFrame

    while (true)
    {
        auto packet = ReadPacketFromSource();
        if (av_gettime() - startTime > 30 * 1000 * 1000)
        {
            break;
        }

        if (packet && packet->stream_index == 0)
        {
            if (Decode(inputformatCtx->streams[0], packet.get(), videoFrame))
            {
                sws_scale(pSwsContext,
                          (const uint8_t *const *)videoFrame->data, videoFrame->linesize,
                          0, inputformatCtx->streams[0]->codec->height,
                          (uint8_t *const *)pSwsVideoFrame->data, pSwsVideoFrame->linesize);
                auto packetEncode = Encode(encodeContext, pSwsVideoFrame);
                if (packetEncode)
                {
                    ret = WritePacket(packetEncode);
                    cout << "ret:" << ret << endl;
                }
            }
        }
    }

    cout << "Get Picture End " << endl;
    av_frame_free(&videoFrame);
    avcodec_close(encodeContext);
    av_frame_free(&pSwsVideoFrame);

Error:
    CloseInput();
    CloseOuput();

    return 0;
}