/**
 * @file main.cpp
 * @author your name (you@domain.com)
 * @brief Read an H.264 RTSP stream and remux it to a local TS file, or relay it over UDP/RTMP
 * @version 0.1
 * @date 2021-05-11
 * 
 * @copyright Copyright (c) 2021
 * 
 */
#include <memory>
#include <thread>
#include <iostream>
#include "ffmpeg.h"

// Demuxer context for the RTSP source; owned here, released by CloseInput().
AVFormatContext *inputContext = nullptr;
// Muxer context for the TS/UDP/RTMP sink; owned here, released by CloseOutput().
// Zero-initialized as a global, so it is nullptr until OpenOutPut() succeeds.
AVFormatContext *outputContext;
// av_gettime() timestamp (microseconds) of the most recent read attempt;
// consulted by interrupt_cb() to abort FFmpeg I/O that stalls too long.
int64_t lastReadPacktTime;

using namespace std;

// Forward declarations for the helpers defined below.
void ffmpeg_init();
int OpenInput(const char *url);
int OpenOutPut(const char *url);
shared_ptr<AVPacket> ReadPacketFromSource();
int WritePacket(shared_ptr<AVPacket> packet);
void CloseInput();
void CloseOutput();

// Typical time bases: raw H.264 num=1/den=90000, mp4 num=1/den=12800,
// flv num=1/den=1000.
// NOTE(review): libavcodec ships its own av_packet_rescale_ts() with this
// exact signature; this local definition shadows it — confirm intentional.
void av_packet_rescale_ts(AVPacket *pkt, AVRational src_tb, AVRational dst_tb)
{
    // Convert one timestamp value between the two time bases.
    auto rescale = [&](int64_t v) { return av_rescale_q(v, src_tb, dst_tb); };

    // AV_NOPTS_VALUE marks an undefined timestamp; rescaling it would
    // turn the sentinel into a bogus concrete value, so skip those.
    if (pkt->pts != AV_NOPTS_VALUE)
        pkt->pts = rescale(pkt->pts);
    if (pkt->dts != AV_NOPTS_VALUE)
        pkt->dts = rescale(pkt->dts);
    if (pkt->duration > 0)
        pkt->duration = rescale(pkt->duration);
}

/**
 * @brief Entry point: open the RTSP source, open the output sink, then
 *        pump packets until the source ends or errors.
 *
 * Endpoints may be overridden on the command line:
 *   prog [input-url] [output-url]
 * The defaults preserve the original hard-coded behavior.
 */
int main(int argc, char *argv[])
{
    ffmpeg_init();

    const char *inputUrl = argc > 1 ? argv[1] : "rtsp://192.168.8.142:8554/sintel.264";
    const char *outputUrl = argc > 2 ? argv[2] : "rtmp://127.0.0.1:1935/live/stream0";

    int ret = OpenInput(inputUrl);
    if (ret >= 0)
    {
        // Alternative sinks:
        //   "D:\\test.ts"            -- local TS file (use "mpegts" in OpenOutPut)
        //   "udp://127.0.0.1:12345"  -- UDP relay
        // RTMP relaying needs a running RTMP server (default port 1935),
        // e.g. crtmpserver.
        ret = OpenOutPut(outputUrl);
    }

    if (ret >= 0)
    {
        // Pump packets until the source reports EOF, a network error
        // occurs, or the interrupt callback aborts a stalled read.
        while (true)
        {
            auto packet = ReadPacketFromSource();
            if (!packet)
            {
                break;
            }
            if (WritePacket(packet) >= 0)
            {
                cout << "WritePacket Success!" << endl;
            }
            else
            {
                cout << "WritePacket failed!" << endl;
            }
        }
    }

    // Unconditional tear-down: both helpers tolerate contexts that were
    // never opened.
    CloseInput();
    CloseOutput();
    return 0;
}

/**
 * @brief One-time global FFmpeg setup.
 *
 * Registers every built-in muxer/demuxer and filter, initializes the
 * networking layer, and suppresses log output below AV_LOG_ERROR.
 */
void ffmpeg_init()
{
    av_log_set_level(AV_LOG_ERROR);
    av_register_all();       // register all muxers/demuxers
    avfilter_register_all(); // register all built-in filters
    avformat_network_init(); // global init of the network stack
}

static int interrupt_cb(void *ctx)
{
    int timeout = 3;
    if (av_gettime() - lastReadPacktTime > timeout * 1000 * 1000)
    {
        av_log(NULL, AV_LOG_ERROR, "interrupt_cb failed \n");
        return -1;
    }

    return 0;
}

int OpenInput(const char *url)
{
    inputContext = avformat_alloc_context();//创建一个输入的上下文
    lastReadPacktTime = av_gettime();
    inputContext->interrupt_callback.callback = interrupt_cb;//设置回调函数
    int ret = avformat_open_input(&inputContext, url, nullptr, nullptr);//Open an input stream and read the header. The codecs are not opened.The stream must be closed with avformat_close_input().
    if (ret < 0)//AVInputFormat  如果为非NULL，则此参数强制使用特定的输入格式。否则，将自动检测格式
    {
        av_log(NULL, AV_LOG_ERROR, "Input file open input failed \n");
        return ret;
    }

    ret = avformat_find_stream_info(inputContext, nullptr);//Read packets of a media file to get stream information. 
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_ERROR, "Find input file stream inform failed \n");
    }
    else
    {
        av_log(NULL, AV_LOG_INFO, "Open input file %s success \n", url);
    }

    return ret;
}

int OpenOutPut(const char *outUrl)
{
    // int ret = avformat_alloc_output_context2(&outputContext, nullptr, "mpegts", outUrl);//Allocate an AVFormatContext for an output format
    int ret = avformat_alloc_output_context2(&outputContext, nullptr, "flv", outUrl); //如果要使用rtmp 推流的方式，输出格式要修改为flv
    if (ret < 0)//format_name – the name of output format to use for allocating the context ； mpegts 将代表我们生成的目标文件为ts
    {
        av_log(NULL, AV_LOG_ERROR, "Open output context failed \n");
        goto Error;
    }
    ret = avio_open2(&outputContext->pb, outUrl, AVIO_FLAG_WRITE, nullptr, nullptr);//Create and initialize a AVIOContext for accessing there source indicated by url.
    if (ret < 0)//flags  用于控制如何打开由 url 指示的资源 //outputContext->pb「AVIOContext *pb」 打开一个输出上下文
    {
        av_log(NULL, AV_LOG_ERROR, "open avio failed \n");
        goto Error;
    }

    for (int i = 0; i < inputContext->nb_streams; ++i)
    {
        //用输入流的编码信息来创建一个流对像，
        AVStream *stream = avformat_new_stream(outputContext, inputContext->streams[i]->codec->codec);
        //把输入流的编码信息拷贝到输出流的编码信息中，
        ret = avcodec_copy_context(stream->codec, inputContext->streams[i]->codec);
        if (ret < 0)
        {
            av_log(NULL, AV_LOG_ERROR, "copy codec context failed \n");
            goto Error;
        }
    }

    // 分配流的格式信息，并把流的头部信息写到输出文件
    ret = avformat_write_header(outputContext, nullptr);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_ERROR, "format write header failed \n");
    }

    av_log(NULL, AV_LOG_FATAL, "format write header success \n ");
    return ret;
Error:
    if (outputContext)
    {
        for (size_t i = 0; i < outputContext->nb_streams; i++)
        {
            avcodec_close(outputContext->streams[i]->codec);
        }

        avformat_close_input(&outputContext);
    }
    return ret;
}

/**
 * @brief Read the next packet from the input context.
 *
 * Also refreshes lastReadPacktTime so interrupt_cb() does not abort a
 * healthy but momentarily slow read.
 *
 * @return a self-freeing packet on success, nullptr on EOF/error
 */
shared_ptr<AVPacket> ReadPacketFromSource()
{
    // Fix: av_packet_alloc() returns a fully zero-initialized packet,
    // whereas the old av_malloc() + av_init_packet() combination left
    // data/size uninitialized. av_packet_free() both unrefs the payload
    // and releases the struct, so the follow-up av_freep() (and the
    // debug cout in the deleter) are gone. The stateless deleter no
    // longer captures the enclosing scope it could outlive.
    shared_ptr<AVPacket> packet(av_packet_alloc(),
                                [](AVPacket *p) { av_packet_free(&p); });
    if (!packet)
    {
        return nullptr;
    }

    lastReadPacktTime = av_gettime();
    int ret = av_read_frame(inputContext, packet.get());
    if (ret >= 0)
    {
        return packet;
    }
    return nullptr;
}

/**
 * @brief Rescale a packet's timestamps to the output stream's time base
 *        and mux it into the output.
 *
 * @param packet packet read from the input context
 * @return >= 0 on success, a negative AVERROR code on failure
 */
int WritePacket(shared_ptr<AVPacket> packet)
{
    AVStream *srcStream = inputContext->streams[packet->stream_index];
    AVStream *dstStream = outputContext->streams[packet->stream_index];

    // Timestamps are meaningful only relative to a time base; convert
    // them from the input stream's base to the output stream's base.
    av_packet_rescale_ts(packet.get(), srcStream->time_base, dstStream->time_base);

    // Interleaved writing buffers and orders packets across streams;
    // preferred over plain av_write_frame().
    return av_interleaved_write_frame(outputContext, packet.get());
}

/**
 * @brief Close and free the input demuxer context, if one is open.
 *
 * Safe to call when the input was never opened (or already closed):
 * avformat_close_input() resets inputContext to nullptr.
 */
void CloseInput()
{
    if (inputContext == nullptr)
    {
        return;
    }
    avformat_close_input(&inputContext);
}

void CloseOutput()
{
    if (outputContext != nullptr)
    {
        for (size_t i = 0; i < outputContext->nb_streams; i++)
        {
            AVCodecContext *codecContext = outputContext->streams[i]->codec;
            avcodec_close(codecContext);
        }

        avformat_close_input(&outputContext);
    }
}
