﻿#include "pch.h"
#include <iostream>

#include <DirectXHelper.cpp>

#ifdef _MSC_VER
#include <Windows.h>
#pragma warning(disable : 4819)
#pragma warning(disable : 4996)
#endif

#include <stdio.h>
#include <assert.h>
#include <math.h>
#include <time.h>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>

#include <libswscale/swscale.h>
}

#ifdef __MINGW32__
#undef main /* Prevents SDL from overriding main() */
#endif

#ifdef _MSC_VER
#undef main
#endif

#ifdef _MSC_VER
#pragma warning(disable : 4244)
#pragma comment(lib, "avformat")
#pragma comment(lib, "avcodec")
//#pragma comment(lib, "avdevice")
//#pragma comment(lib, "avfilter")
#pragma comment(lib, "avutil")
//#pragma comment(lib, "swresample")
#pragma comment(lib, "swscale")
#endif

//// compatibility with newer API
//#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,28,1)
//#define av_frame_alloc avcodec_alloc_frame
//#define av_frame_free avcodec_free_frame
//#endif

////////////////////////////////////////////////////////////////////////////////
int FFmpeg_Code01(int argc, char * argv[]);
////////////////////////////////////////////////////////////////////////////////

// Translate an FFmpeg error code into a human-readable message.
//
// @param code  Negative AVERROR value. Codes in the system-errno range
//              (e.g. -2 for "file not found") can be mapped back to a
//              positive errno with AVERROR(e).
// @param msg   Receives the message text (truncated to fit 256 bytes).
// @return      Result of av_strerror(): 0 on success, negative when the
//              code is unknown (a generic description is still written).
int get_error_msg(int code, std::string& msg) {
    std::string str_err_msg;
    str_err_msg.resize(256);
    // av_strerror always NUL-terminates the buffer; write directly into
    // the string's storage (no C-style cast needed).
    int ret = av_strerror(code, &str_err_msg[0], str_err_msg.length());
    // Copy via c_str() so trailing NUL padding is not kept in the result.
    msg = str_err_msg.c_str();
    return ret;
}

void GetMem()
{
    MEMORYSTATUSEX WinMemStat;
    WinMemStat.dwLength = sizeof(MEMORYSTATUSEX);
    ::GlobalMemoryStatusEx(&WinMemStat);

    printf("WinMemStat.ullTotalPhys            = %16llu\n", WinMemStat.ullTotalPhys);
    printf("WinMemStat.ullAvailPhys            = %16llu\n", WinMemStat.ullAvailPhys);
    //printf("Now Used                           = %16llu\n", (int64_t)(WinMemStat.ullTotalPhys - WinMemStat.ullAvailPhys));
    printf("WinMemStat.ullTotalVirtual         = %16llu\n", WinMemStat.ullTotalVirtual);
    printf("WinMemStat.ullAvailVirtual         = %16llu\n", WinMemStat.ullAvailVirtual);
    printf("Now Used                           = %16llu\n", (int64_t)(WinMemStat.ullTotalVirtual - WinMemStat.ullAvailVirtual));
}

// Entry point: runs the FFmpeg demo in an endless loop so memory behavior
// can be observed across iterations; waits for Enter before each run.
// The trailing return is unreachable but kept for form.
int main(int argc, char * argv[])
{
    int result = 0;

    for (;;)
    {
#if 1
        // Pause so tools (e.g. Task Manager) can be checked between runs.
        printf("Press Enter to next loop.\n");
        getchar();
#endif

        result = FFmpeg_Code01(argc, argv);
    }

    return result;
}

// Allocate an AVFrame and map the caller-supplied YUV420P pixel buffer
// onto its data/linesize arrays. The frame does NOT take ownership of
// pBuf; the caller keeps responsibility for freeing it.
//
// @param ppFrameYUV  Receives the new frame; set to NULL on failure.
// @param width       Picture width in pixels.
// @param height      Picture height in pixels.
// @param pBuf        Buffer of at least
//                    av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 32)
//                    bytes.
// @return 0 on success, negative on failure.
int CreateAndFillFrame(AVFrame** ppFrameYUV, int width, int height, const uint8_t *pBuf)
{
    int ret = -1;
    std::string errorMessage;

    do
    {
        *ppFrameYUV = av_frame_alloc();
        if (!*ppFrameYUV) {
            // Previously unchecked: av_frame_alloc can return NULL, which
            // would have crashed av_image_fill_arrays below.
            printf("av_frame_alloc failed.\n");
            break;
        }

        // Lay out the three YUV420P planes (32-byte alignment) inside pBuf.
        ret = av_image_fill_arrays((*ppFrameYUV)->data, (*ppFrameYUV)->linesize, pBuf, AV_PIX_FMT_YUV420P, width, height, 32);
        if (ret < 0) {
            get_error_msg(ret, errorMessage);
            printf("av_image_fill_arrays failed, %s\n", errorMessage.c_str());
            // Don't leak the frame on failure (av_frame_free NULLs the ptr,
            // so the caller's own cleanup remains safe).
            av_frame_free(ppFrameYUV);
            break;
        }

        ret = 0;
    } while (false);

    return ret;
}

// Demo routine: open the media file/URL given in argv[1], pick the best
// video stream, decode every video packet and convert each decoded frame
// to YUV420P via libswscale. Raw Y/U/V plane dumping is wired in but
// disabled with #if 0 blocks. Returns 0 on success, non-zero otherwise.
//
// NOTE(review): relies on the deprecated AVStream::codec context and
// avcodec_decode_video2(); the modern path is avcodec_parameters_to_context
// plus avcodec_send_packet()/avcodec_receive_frame().
int FFmpeg_Code01(int argc, char * argv[])
{
    int ret = -1;
    int cnt = 0;
    AVFormatContext* pFormatCtx = NULL;
    AVDictionary* pDictionary = NULL;
    AVPacket packet;
    AVFrame *frame = NULL, *frameYUV = NULL;
    uint8_t *pBuf = NULL;
    // Handles pixel-format conversion and image scaling/stretching.
    struct SwsContext* swsCtx = NULL;

    bool videoCodecOpened = false;
    AVCodecContext* vCodecCtx = NULL;

    // NOTE(review): argv[1] is read without checking argc — running with no
    // argument dereferences past the argument list.
    const char* path = argv[1];
    std::string errorMessage;

    //path = "http://ivi.bupt.edu.cn/hls/cctv1hd.m3u8";

    FILE *fpY = NULL;
    FILE *fpU = NULL;
    FILE *fpV = NULL;

    do
    {
        //printf("%s\n\n\n", avcodec_configuration());

#if 0
        av_register_all();
        //avcodec_register_all();
#endif

#if 0
        // Example of passing demuxer options through an AVDictionary.
        cnt = av_dict_count(pDictionary);
        ret = av_dict_set(&pDictionary, "rtsp_transport", "tcp", 0);
        cnt = av_dict_count(pDictionary);
        ret = av_dict_set(&pDictionary, "max_delay", "500", 0);
        cnt = av_dict_count(pDictionary);
#endif

#if 0
        AVDictionaryEntry *dicEntry = av_dict_get(pDictionary, "max_delay", NULL, 0);
        AVDictionaryEntry *dicEntry2 = av_dict_get(pDictionary, "rtsp_transport", NULL, 0);
        // Not found here (the search resumes after the entry passed in).
        AVDictionaryEntry *dicEntry3 = av_dict_get(pDictionary, "max_delay", dicEntry, 0);
        // Found: lookup appears to follow av_dict_set insertion order, so
        // resuming from an earlier entry can still reach later insertions.
        AVDictionaryEntry *dicEntry4 = av_dict_get(pDictionary, "max_delay", dicEntry2, 0);
#endif

        // Open the input file / URL.
        ret = avformat_open_input(&pFormatCtx, path, NULL, &pDictionary);
        if (ret != 0) {
            get_error_msg(ret, errorMessage);
            printf("avformat_open_input failed, %s\n", errorMessage.c_str());
            break;
        }
        // ⭕ Now available: number of streams (nb_streams) and the total
        //    duration in microseconds (duration).
        //
        // ⭕ pFormatCtx->streams[<videoStreamIndex>]->codec exposes:
        //    bit_rate
        //    bit_rate_tolerance
        //    width
        //    height
        printf("avformat_open_input succeeded\n");

#if 0
        ret = 0;
        break;
#endif

        // NOTE(review): shadows the 'cnt' declared at function scope.
        int cnt = pFormatCtx->nb_streams;

        // Probe the container to fill in per-stream information.
        ret = avformat_find_stream_info(pFormatCtx, NULL);
        if (ret != 0) {
            get_error_msg(ret, errorMessage);
            printf("avformat_find_stream_info failed, %s\n", errorMessage.c_str());
            break;
        }
        // ⭕ pFormatCtx additionally provides:
        //    start_time
        //    bit_rate
        //    duration_estimation_method
        //

        // ⭕ pFormatCtx->streams[<videoStreamIndex>]->codec provides
        //    bit_rate
        //    bit_rate_tolerance
        //    width
        //    height
        //    time_base    (AVRational)
        //    ticks_per_frame
        //    coded_width
        //    coded_height
        //    pix_fmt
        //    sample_aspect_ratio    (AVRational)
        //    framerate    (AVRational)
        //    color_primaries, color_trc, colorspace, color_range, chroma_sample_location
        //    bits_per_raw_sample
        //    profile, level
        //    (the exact set can vary with the file being opened)
        printf("avformat_find_stream_info succeeded\n");

        // With the stream info known, locate a suitable decoder.

        // Live network streams have no retrievable duration; the field keeps
        // its default of 0x8000000000000000 (AV_NOPTS_VALUE).
        int64_t minutes = 0;
        int64_t seconds = 0;

        if (pFormatCtx->duration == 0x8000000000000000) {
            minutes = -1;
            seconds = -1;
        }
        else {
            minutes = (pFormatCtx->duration / 1000000) / 60;
            seconds = (pFormatCtx->duration / 1000000) % 60;
        }

        // The second parameter (index) seems to accept any value here.
        //av_dump_format(pFormatCtx, 3, path, 0);
        //printf("\n");

        int audioStreamIdx = -1, videoStreamIdx = -1;

        AVCodec* vCodec = NULL;
        videoStreamIdx = av_find_best_stream(pFormatCtx, AVMediaType::AVMEDIA_TYPE_VIDEO, -1, -1, &vCodec, 0);
        if (videoStreamIdx < 0) {
            printf("av_find_best_stream failed\n");
            break;
        }
        printf("av_find_best_stream succeeded\n");

        // NOTE(review): AVStream::codec is deprecated; codecpar is preferred.
        vCodecCtx = pFormatCtx->streams[videoStreamIdx]->codec;

        // If the previous call did not hand back a decoder, look one up.
        if (!vCodec) {
            // Yields the same AVCodec that av_find_best_stream returns.
            vCodec = avcodec_find_decoder(vCodecCtx->codec_id);
            if (!vCodec) {
                printf("avcodec_find_decoder failed, codec_id = [ %d ]\n", vCodecCtx->codec_id);
                break;
            }
            printf("avcodec_find_decoder succeeded\n");
        }

        // Open (initialize) the decoder.
        //  !!! This function is NOT thread-safe !!!
        ret = avcodec_open2(vCodecCtx, vCodec, NULL);
        if (ret < 0) {
            get_error_msg(ret, errorMessage);
            printf("avcodec_open2 failed, %s\n", errorMessage.c_str());
            break;
        }
        videoCodecOpened = true;
        // ⭕ AVCodec* vCodec provides:
        //    name, long_name
        //    type(AVMediaType), id(AVCodecID)
        //    capabilities
        //

        // ⭕ pFormatCtx->streams[<videoStreamIndex>]->codec provides
        //    pts_correction_last_pts  initialized to  0x8000000000000000
        //    pts_correction_last_dts  initialized to  0x8000000000000000
        //    codec_descriptor
        //    ->    { id, type, name, long_name, , props, ....}
        printf("avcodec_open2 succeeded\n");

        frame    = av_frame_alloc();
        frameYUV = av_frame_alloc();

        int width = vCodecCtx->width;
        int height = vCodecCtx->height;

        int bytes = 0;

#if 0
        // old API
        bytes = avpicture_get_size(AV_PIX_FMT_YUV420P, width, height);
        pBuf = (uint8_t*)av_malloc(bytes);
        if (!pBuf) {
            printf("av_malloc failed.\n");
            break;
        }

        ret = avpicture_fill((AVPicture*)frameYUV, pBuf, AV_PIX_FMT_YUV420P, width, height);
        if (ret < 0) {
            get_error_msg(ret, errorMessage);
            printf("avpicture_fill failed, %s\n", errorMessage.c_str());
            break;
        }
        printf("avpicture_fill succeeded\n");
#else
        // new API: allocate one buffer large enough for the 32-byte aligned
        // YUV420P layout, then point frameYUV's plane pointers into it.
        bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 32);
        pBuf = (uint8_t *)av_malloc(bytes * sizeof(uint8_t));
        if (!pBuf) {
            printf("av_malloc failed.\n");
            break;
        }

        ret = av_image_fill_arrays(frameYUV->data, frameYUV->linesize, pBuf, AV_PIX_FMT_YUV420P, width, height, 32);
        if (ret < 0) {
            get_error_msg(ret, errorMessage);
            printf("av_image_fill_arrays failed, %s\n", errorMessage.c_str());
            break;
        }
        printf("av_image_fill_arrays succeeded\n");
#endif

        //packet
        //sws_getCachedContext() // for single-threaded use only

        SwsFilter* null_filter = NULL;
        //
        // SWS_BICUBIC — bicubic interpolation.
        //
        swsCtx = sws_getContext(width, height, vCodecCtx->pix_fmt, width, height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, null_filter, null_filter, NULL);
        if (!swsCtx) {
            printf("sws_getContext failed.\n");
            break;
        }
        printf("sws_getContext succeeded\n");
        //sws_freeContext

#if 0
        std::string workDir = "C:\\Work\\ffmpeg_exercise\\";
        std::string fileY = workDir + "output_YUV_Y.y";
        std::string fileU = workDir + "output_YUV_U.y";
        std::string fileV = workDir + "output_YUV_V.y";
        fpY = fopen(fileY.c_str(), "wb+");
        fpU = fopen(fileU.c_str(), "wb+");
        fpV = fopen(fileV.c_str(), "wb+");
#endif

        // Start reading packets from the demuxer.
        int hasErr = 0;
        int gotPic = 0;

        int cnt_pic_packet = 0;
        int cnt_frame = 0;

        //pPacket = (AVPacket*)av_malloc(sizeof(AVPacket));
        av_init_packet(&packet);
        while (1)
        {
            int readret = 0;
            readret = av_read_frame(pFormatCtx, &packet);
            if (readret < 0) {
                if (AVERROR_EOF == readret) {
                    printf("end of file.\n");
                    break;
                }
                get_error_msg(readret, errorMessage);
                break;
            }

            if (packet.stream_index == videoStreamIdx) {

                // Deprecated; use avcodec_send_packet() and avcodec_receive_frame().
                ret = avcodec_decode_video2(vCodecCtx, frame, &gotPic, &packet);
                if (ret  < 0) {
                    hasErr = 1;
                    printf("avcodec_decode_video2 failed.\n");
                    break;
                }

                cnt_pic_packet++;

                if (gotPic) {
                    //printf("got pic.\n");
                    cnt_pic_packet = 0;

                    // Convert the decoded frame into the YUV420P buffer.
                    int sliceHeight = sws_scale(swsCtx, (const uint8_t**)frame->data, frame->linesize,
                        0, height,
                        frameYUV->data, frameYUV->linesize);
#if 0
                    fwrite(frameYUV->data[0], 1, width * height, fpY);
                    fwrite(frameYUV->data[1], 1, width * height / 4, fpU);
                    fwrite(frameYUV->data[2], 1, width * height / 4, fpV);
#endif
                    cnt_frame++;

#if 0
                    // Inside avcodec_decode_video2 the avcodec_receive_frame
                    // path calls av_frame_unref, releasing the previous
                    // iteration's frame data — so no manual free is needed.
                    av_frame_free(&frame);
                    frame = av_frame_alloc();
#endif

#if 0
                    // sws_scale does not alter frameYUV's ownership either,
                    // so re-creating it here is unnecessary as well.
                    av_frame_free(&frameYUV);
                    ret = CreateAndFillFrame(&frameYUV, width, height, pBuf);
                    if (ret < 0) {
                        hasErr = 1;
                        break;
                    }
#endif

                }


            }// video stream data

            // Done with this packet: release it and re-init for the next read.
            av_packet_unref(&packet);
            av_init_packet(&packet);
        }// while 
        if (hasErr) {
            break;
        }
        printf("Now we got [ %d ] frames.\n", cnt_frame);

        ret = 0;
    } while (false);

    // Cleanup — each guard makes this safe on partially-initialized state.
    if (fpY) {
        fclose(fpY); fpY = NULL;
    }
    if (fpU) {
        fclose(fpU); fpU = NULL;
    }
    if (fpV) {
        fclose(fpV); fpV = NULL;
    }
    if (swsCtx) {
        sws_freeContext(swsCtx);
    }
    if (pBuf) {
        av_freep(&pBuf);
    }
    if (frame) {
        av_frame_free(&frame);
    }
    if (frameYUV) {
        av_frame_free(&frameYUV);
    }
    if (videoCodecOpened) {
        avcodec_close(vCodecCtx);
    }
    if (pFormatCtx)
        avformat_close_input(&pFormatCtx);

    return ret;
}


#if 0
// Illustration (compiled out): how a decoded YUV420P AVFrame's planes map
// onto a DirectX surface (D3DLOCKED_RECT). frameYUV is NULL here, so this
// is reference pseudo-code only — it would crash if enabled as-is.
void SomeRelation()
{
    // YUV420P -> DirectX Surface
    AVFrame* frameYUV = NULL;
    D3DLOCKED_RECT _d3d_rect;

    int win_width = 800;
    int win_height = 600; // size of the Windows window


    // NOTE(review): only pStr (data[0]) is read below; pStrU/pStrV are
    // unused. The copies assume the three planes are laid out contiguously
    // in data[0] — true for av_image_fill_arrays-backed frames, but not
    // guaranteed for arbitrary AVFrames. Confirm before enabling.
    byte* pStr = frameYUV->data[0];
    byte* pStrU = frameYUV->data[1];
    byte* pStrV = frameYUV->data[2];

    byte* pDest = (byte*)_d3d_rect.pBits;
    int nlinesize = _d3d_rect.Pitch;  // Number of bytes in one row of the surface.

    int i = 0;
    // Y plane: copy row by row (surface pitch may exceed the frame width).
    for (i = 0; i < win_height; i++)
    {
        memcpy(&pDest[i * nlinesize], &pStr[i * win_width], win_width);
    }
    // U plane (quarter size, placed right after the Y plane).
    for (i = 0; i < win_height / 2; i++)
    {
        byte* pDstNow = pDest + nlinesize * win_height + i * nlinesize / 2;
        byte* pSrcNow = pStr + win_width * win_height + i * win_width / 2; // byte* pSrcNow = pStr + (win_width * win_height) + (win_width * win_height / 4) + i * win_width / 2;
        int byteSize = win_width / 2;
        memcpy(pDstNow, pSrcNow, byteSize);
    }
    // V plane. NOTE(review): the source offset below is identical to the U
    // loop's — it looks like it should additionally skip the U plane
    // (+ win_width * win_height / 4, as the commented alternative above
    // suggests); verify before enabling this code.
    for (i = 0; i < win_height / 2; i++)
    {
        // Destination skips Y plus the U plane (1 + 1/4 of the Y size).
        byte* pDstNow = pDest + nlinesize * win_height + nlinesize * win_height / 4 + i * nlinesize / 2;
        byte* pSrcNow = pStr + win_width * win_height + i * win_width / 2;
        int byteSize = win_width / 2;

        memcpy(pDstNow, pSrcNow, byteSize);
    }

}
#endif