/**
 * @file rtmp_forward_test.c
 * @author wuzhiping
 * @brief RTMP forwarding test: listens for one RTMP push stream and relays
 *        its packets to a second RTMP listen endpoint for a pulling player.
 * @version 0.1
 * @date 2021-12-11
 *
 * @copyright Copyright (c) 2021
 *
 */
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

#include <libavformat/avformat.h>
#include <libavformat/avio.h>
#include <libavutil/timestamp.h>

static void log_packet(const AVFormatContext *fmt_ctx, const AVPacket *pkt, const char *tag)
{
    AVRational *time_base = &fmt_ctx->streams[pkt->stream_index]->time_base;

    printf("%s: pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n",
           tag,
           av_ts2str(pkt->pts), av_ts2timestr(pkt->pts, time_base),
           av_ts2str(pkt->dts), av_ts2timestr(pkt->dts, time_base),
           av_ts2str(pkt->duration), av_ts2timestr(pkt->duration, time_base),
           pkt->stream_index);
}

/**
 * Arguments handed to the forwarding worker thread (heap/stack ownership of
 * the contexts stays with the creator; the thread only uses them).
 */
typedef struct
{
    AVFormatContext *input_server_ctx;  // demuxer ctx of the incoming RTMP push stream
    AVFormatContext *out_server_ctx;    // muxer ctx of the outgoing RTMP (FLV) stream
    AVDictionary *out_opts;             // muxer options passed to avformat_write_header()
} rtmp_forward_param;

/**
 * Worker-thread entry point: copy every audio/video/subtitle packet from the
 * input context to the output context, rescaling timestamps between the two
 * stream time bases (no transcoding).
 *
 * @param param  rtmp_forward_param* with both contexts and the muxer options.
 *               Ownership of the contexts stays with the caller.
 * @return always NULL (result is unused; the thread is detached).
 */
void *forward_rtmp_av(void *param)
{
    rtmp_forward_param *p = (rtmp_forward_param *)param;

    AVFormatContext *input_server_ctx = p->input_server_ctx;
    AVFormatContext *out_server_ctx = p->out_server_ctx;
    AVDictionary *out_opts = p->out_opts;

    int ret;
    // 3. construct av stream for output context
    int stream_index = 0;
    int stream_mapping_size = input_server_ctx->nb_streams;

    // stream_mapping[i] = output index for input stream i, or -1 if dropped.
    int *stream_mapping = av_mallocz_array(stream_mapping_size, sizeof(*stream_mapping));
    if (!stream_mapping)
    {
        fprintf(stderr, "Failed allocating stream mapping\n");
        goto end;
    }

    for (unsigned int i = 0; i < input_server_ctx->nb_streams; i++)
    {
        AVStream *out_stream;
        AVStream *in_stream = input_server_ctx->streams[i];
        AVCodecParameters *in_codecpar = in_stream->codecpar;

        // Only forward A/V/subtitle streams; drop data/attachment streams.
        if (in_codecpar->codec_type != AVMEDIA_TYPE_AUDIO &&
            in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO &&
            in_codecpar->codec_type != AVMEDIA_TYPE_SUBTITLE)
        {
            stream_mapping[i] = -1;
            continue;
        }

        stream_mapping[i] = stream_index++;

        out_stream = avformat_new_stream(out_server_ctx, NULL);
        if (!out_stream)
        {
            // Fixed: previously fell through and dereferenced the NULL stream.
            fprintf(stderr, "Failed allocating output stream\n");
            goto end;
        }

        ret = avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
        if (ret < 0)
        {
            fprintf(stderr, "Failed to copy codec parameters\n");
            goto end;
        }
        // Let the FLV muxer pick its own codec tag.
        out_stream->codecpar->codec_tag = 0;
    }

    // 4. write header
    ret = avformat_write_header(out_server_ctx, &out_opts);
    if (ret < 0)
    {
        fprintf(stderr, "Error writing output header\n");
        goto end;
    }

    // 5. read av frame from input context, change pts and dts, then interleaved write to output context
    AVPacket pkt;
    while (1)
    {
        AVStream *in_stream, *out_stream;

        ret = av_read_frame(input_server_ctx, &pkt);
        if (ret < 0)  // EOF or read error ends the relay loop
            break;

        in_stream = input_server_ctx->streams[pkt.stream_index];
        if (pkt.stream_index >= stream_mapping_size ||
            stream_mapping[pkt.stream_index] < 0)
        {
            av_packet_unref(&pkt);
            continue;
        }

        pkt.stream_index = stream_mapping[pkt.stream_index];
        out_stream = out_server_ctx->streams[pkt.stream_index];
        log_packet(input_server_ctx, &pkt, "in");

        /* copy packet, rescaling timestamps from input to output time base */
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;  // byte position is meaningless after remuxing
        log_packet(out_server_ctx, &pkt, "out");

        ret = av_interleaved_write_frame(out_server_ctx, &pkt);
        if (ret < 0)
        {
            fprintf(stderr, "Error muxing packet\n");
            av_packet_unref(&pkt);
            break;
        }
        av_packet_unref(&pkt);
    }

    av_write_trailer(out_server_ctx);

end:
    av_freep(&stream_mapping);      // fixed: mapping array was leaked
    av_dict_free(&out_opts);        // free options not consumed by the muxer
    return NULL;                    // fixed: function previously fell off the end (UB)
}

int main(int argc, char const *argv[])
{

    int ret;
    int i;

    // 1. open avformat input context, eg. obs push stream
    AVIOContext *avio_input_server_ctx = NULL;
    AVFormatContext *input_server_ctx = NULL;
    AVDictionary *in_opts = NULL;

    // av_dict_set(&in_opts, "timeout", "2", 0);
    av_dict_set(&in_opts, "listen", "2", 0);

    avformat_open_input(&input_server_ctx, "rtmp://0.0.0.0:1935/stream/live", NULL, &in_opts);
    if ((ret = avformat_find_stream_info(input_server_ctx, 0)) < 0)
    {
        fprintf(stderr, "Failed to retrieve input stream information");
        return 0;
    }
    av_dump_format(input_server_ctx, 1, "rtmp://0.0.0.0:1935/stream/live", 0);


    // 2. open avformat output context, eg. vlc stream pull and play
    while (1)
    {
        AVDictionary *out_opts = NULL;
        av_dict_set(&out_opts, "listen", "2", 0);
        // AVIOContext *avio_out_server_ctx;
        // avio_open2(&avio_out_server_ctx, "rtmp://0.0.0.0:1936/stream/live", AVIO_FLAG_WRITE, NULL, &out_opts);

        AVFormatContext *out_server_ctx = NULL;
        avformat_alloc_output_context2(&out_server_ctx, NULL, "flv", "rtmp://0.0.0.0:1936/stream/live");
        ret = avio_open2(&out_server_ctx->pb, "rtmp://0.0.0.0:1936/stream/live", AVIO_FLAG_WRITE, NULL, &out_opts);
        if(ret < 0){
            av_log(out_server_ctx,AV_LOG_ERROR,"open output stream error");
            continue;
        }

        // out_server_ctx->pb = avio_out_server_ctx;
        // avformat_find_stream_info(out_server_ctx, NULL);
        av_dump_format(out_server_ctx, 0, NULL, 1);

        // start a thread and process forward
        pthread_t pid;

        rtmp_forward_param *param;
        param->input_server_ctx = input_server_ctx;
        param->out_server_ctx = out_server_ctx;
        param->out_opts = out_opts;

        pthread_create(&pid, NULL, forward_rtmp_av, (void *)param);
        pthread_detach(pid);
    }

    return 0;
}
