/*
 * Copyright (c) 2021 Shenzhen Kaihong Digital Industry Development Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#include "ffmpeg_utils.h"

extern "C" {
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libavutil/mathematics.h"
    #include "libswscale/swscale.h"
    #include "libavutil/error.h"
}

#include "hilog/log.h"


using namespace OHOS::HiviewDFX;
using OHOS::HiviewDFX::HiLog;
using OHOS::HiviewDFX::HiLogLabel;

using namespace std;

constexpr HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN, "FfmpegUtils"};

#include <cstdlib>


// Declares a local `errbuf` and fills it with the human-readable message
// for the FFmpeg error code `ret` (via av_strerror).
// NOTE: intentionally NOT wrapped in do { ... } while (0) — callers read
// `errbuf` in the statements that follow the macro, so the declaration must
// escape into the enclosing scope. Consequently, use it at most once per
// scope or the redeclaration of `errbuf` will fail to compile.
#define ERROR_BUF(ret) \
    char errbuf[1024]; \
    av_strerror(ret, errbuf, sizeof (errbuf));

int FfmpegUtils::videoCute(const char *in_filename, double start_seconds, double end_seconds, const char *out_filename) {
    HiLog::Info(LABEL, "gyf FfmpegUtils::videoCute begin");
    
    HiLog::Info(LABEL, "gyf in_filename = [%{public}s]", in_filename);
    HiLog::Info(LABEL, "gyf out_filename = [%{public}s]", out_filename);
    HiLog::Info(LABEL, "gyf start_seconds = [%{public}lf]", start_seconds);
    HiLog::Info(LABEL, "gyf end_seconds = [%{public}lf]", end_seconds);
    
    //输入文件格式上下文
    AVFormatContext *ifmt_ctx = nullptr;

    //输出文件格式上下文
    AVFormatContext *ofmt_ctx = nullptr;

    //输出格式
    AVOutputFormat *ofmt = nullptr;

    //音视频包
    AVPacket pkt;
    int ret = 0;
    
    //音视频流
    AVStream *in_stream;

    int64_t *dts_start_from = nullptr;
    int64_t *pts_start_from = nullptr;



    //初始化上下文
    ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0);

    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf avformat_open_input error = %{public}s", errbuf);
        return ret;
    }
    
    ret = avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename);

    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf avformat_alloc_output_context2 error = %{public}s", errbuf);
        goto end;
    }
    
    ofmt = ofmt_ctx->oformat;
    
    // 根据流数量申请空间，并全部初始化为0
    dts_start_from = (int64_t *)std::malloc(sizeof(int64_t) * ifmt_ctx->nb_streams);
    memset(dts_start_from, 0, sizeof(int64_t) * ifmt_ctx->nb_streams);
    
    pts_start_from = (int64_t *)std::malloc(sizeof(int64_t) * ifmt_ctx->nb_streams);
    memset(pts_start_from, 0, sizeof(int64_t) * ifmt_ctx->nb_streams);
    
    //创建流以及参数拷贝
    for (int i = 0; i < (int)ifmt_ctx->nb_streams; i++) {
        in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, NULL);
        if (!out_stream) {
            ret = AVERROR_UNKNOWN;
            goto end;
        }
        avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
        out_stream->codecpar->codec_tag = 0;
    }
    
    //打开输出文件
    ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
    
    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf avio_open error = %{public}s", errbuf);
        goto end;
    }
    
    // 写头信息
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf avformat_write_header error = %{public}s", errbuf);
        goto end;
    }
    
    //跳转到指定帧
    ret = av_seek_frame(ifmt_ctx, -1, start_seconds * AV_TIME_BASE, AVSEEK_FLAG_ANY);
    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf av_seek_frame error = %{public}s", errbuf);
        goto end;
    }
    
    while (1) {
        AVStream *in_stream;
        AVStream *out_stream;
        
        //读取数据
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0) {
            break;
        }
        
        in_stream = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];
        
        // 时间超过要截取的时间，就退出循环
        if (av_q2d(in_stream->time_base) * pkt.pts > end_seconds) {
            av_packet_unref(&pkt);
            break;
        }
        
        // 将截取后的每个流的起始dts 、pts保存下来，作为开始时间，用来做后面的时间基转换
        if (dts_start_from[pkt.stream_index] == 0) {
            dts_start_from[pkt.stream_index] = pkt.dts;
        }
        if (pts_start_from[pkt.stream_index] == 0) {
            pts_start_from[pkt.stream_index] = pkt.pts;
        }
        
        // 时间基转换
        pkt.pts = av_rescale_q_rnd(pkt.pts - pts_start_from[pkt.stream_index], in_stream->time_base, out_stream->time_base, (enum AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.dts = av_rescale_q_rnd(pkt.dts - dts_start_from[pkt.stream_index], in_stream->time_base,out_stream->time_base, (enum AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));

        if (pkt.pts < 0) {
            pkt.pts = 0;
        }
        
        if (pkt.dts < 0) {
            pkt.dts = 0;
        }
        
        pkt.duration = (int) av_rescale_q((int64_t) pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        
        //一帧视频播放时间必须在解码时间点之后，当出现pkt.pts < pkt.dts时会导致程序异常，所以我们丢掉有问题的帧，不会有太大影响。
        if (pkt.pts < pkt.dts) {
            continue;
        }
        
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
        if (ret < 0) {
            ERROR_BUF(ret);
            HiLog::Error(LABEL, "gyf av_interleaved_write_frame error = %{public}s", errbuf);
            break;
        }
        av_packet_unref(&pkt);
    }
    
end:
    //释放资源
    if (dts_start_from) {
        free(dts_start_from);
        dts_start_from = nullptr;
    }
    
    if (pts_start_from) {
        free(pts_start_from);
        pts_start_from = nullptr;
    }
    

    //写文件尾信息
    ret = av_write_trailer(ofmt_ctx);

    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf av_write_trailer error = %{public}s", errbuf);
    }

    return ret;
}

int FfmpegUtils::videoToAacH264(const char* in_filename, const char* out_filename_video , const char* out_filename_audio) {
    AVFormatContext *ifmt_ctx = nullptr;
    AVFormatContext *ofmt_ctx_audio = nullptr;
    AVFormatContext *ofmt_ctx_video = nullptr;
    AVPacket pkt;
    AVOutputFormat *ofmt_audio = nullptr;
    AVOutputFormat *ofmt_video = nullptr;

    int videoindex = -1;
    int audioindex = -1;
    int frame_index = 0;

    int ret;

    HiLog::Error(LABEL, "guoyuefeng in_filename = %{public}s", in_filename);
    HiLog::Error(LABEL, "guoyuefeng out_filename_video = %{public}s", out_filename_video);
    HiLog::Error(LABEL, "guoyuefeng out_filename_audio = %{public}s", out_filename_audio);

    AVBitStreamFilterContext *h264bsfc = NULL;

    av_register_all();

    ret = avformat_open_input(&ifmt_ctx, in_filename, nullptr, nullptr);
    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf avformat_open_input error = %{public}s", errbuf);
        return ret;
    }

    ret = avformat_find_stream_info(ifmt_ctx, nullptr);
    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf avformat_find_stream_info error = %{public}s", errbuf);
        goto end;
    }


    ret = avformat_alloc_output_context2(&ofmt_ctx_video, nullptr, nullptr, out_filename_video);

    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf video avformat_alloc_output_context2 error = %{public}s", errbuf);
        goto end;
    }

    ofmt_video = ofmt_ctx_video->oformat;

    ret = avformat_alloc_output_context2(&ofmt_ctx_audio, NULL, NULL, out_filename_audio);

    if (ret < 0) {
        ERROR_BUF(ret);
        HiLog::Error(LABEL, "gyf audio avformat_alloc_output_context2 error = %{public}s", errbuf);
        goto end;
    }

    ofmt_audio = ofmt_ctx_audio->oformat;



    
    // 一般情况下nb_streams只有两个流，就是streams[0],streams[1]，分别是音频和视频流，不过顺序不定
    for (int i = 0; i < (int)ifmt_ctx->nb_streams; i++)
    {
        AVFormatContext *ofmt_ctx;
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = NULL;

        // 根据音视频类型，根据输入流创建输出流
        if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videoindex = i;
            out_stream = avformat_new_stream(ofmt_ctx_video, nullptr);
            ofmt_ctx = ofmt_ctx_video;
        } else if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            audioindex = i;
            out_stream = avformat_new_stream(ofmt_ctx_audio, nullptr);
            ofmt_ctx = ofmt_ctx_audio;
        } else {
             break;
        }

        if (!out_stream)
        {
            HiLog::Error(LABEL, "gyf out_stream new failed!");
            return -1;
        }

        // in_codec_ctx = avcodec_alloc_context3(NULL);
        // avcodec_parameters_to_context(in_codec_ctx, in_stream->codecpar);

        // out_codec_ctx = avcodec_alloc_context3(NULL);
        // avcodec_parameters_to_context(out_codec_ctx, out_stream->codecpar);



        // 复制到输出流
        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);

        if (ret < 0) {
            ERROR_BUF(ret);
            HiLog::Error(LABEL, "gyf avcodec_copy_context error = %{public}s", errbuf);
            goto end;
        }

        out_stream->codec->codec_tag = 0;

        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }

    // 打开输出文件
    if (!(ofmt_video->flags & AVFMT_NOFILE))
    {
        ret = avio_open(&ofmt_ctx_video->pb, out_filename_video, AVIO_FLAG_WRITE);

        if (ret < 0) {
            ERROR_BUF(ret);
            HiLog::Error(LABEL, "gyf video avio_open error = %{public}s", errbuf);
            goto end;
        }
    }
    if (!(ofmt_audio->flags & AVFMT_NOFILE))
    {
        ret = avio_open(&ofmt_ctx_audio->pb, out_filename_audio, AVIO_FLAG_WRITE);
        
        if (ret < 0) {
            ERROR_BUF(ret);
            HiLog::Error(LABEL, "gyf audio avio_open error = %{public}s", errbuf);
            goto end;
        } 
    }

    // 写文件头部
    if (avformat_write_header(ofmt_ctx_video, NULL) < 0)
    {
        // printf_s("Error occurred when opening video output file\n");
        return -1;
    }
    if (avformat_write_header(ofmt_ctx_audio, NULL) < 0)
    {
        // printf_s("Error occurred when opening video output file\n");
        return -1;
    }

    // 分离某些封装格式（例如MP4/FLV/MKV等）中的H.264的时候，需要首先写入SPS和PPS，否则会导致分离出来的数据
    // 没有SPS、PPS而无法播放。使用ffmpeg中名称为“h264_mp4toannexb”的bitstream filter处理
    h264bsfc = av_bitstream_filter_init("h264_mp4toannexb");

    while (1)
    {
        AVFormatContext *ofmt_ctx;
        AVStream *in_stream, *out_stream;

        //Get an AVPacket
        if (av_read_frame(ifmt_ctx, &pkt) < 0)
            break;
        in_stream = ifmt_ctx->streams[pkt.stream_index];
        // stream_index标识该AVPacket所属的视频/音频流
        if (pkt.stream_index == videoindex)
        {
            // 前面已经通过avcodec_copy_context()函数把输入视频/音频的参数拷贝至输出视频/音频的AVCodecContext结构体
            // 所以使用的就是ofmt_ctx_video的第一个流streams[0]
            out_stream = ofmt_ctx_video->streams[0];
            ofmt_ctx = ofmt_ctx_video;
            // printf_s("Write Video Packet. size:%d\tpts:%lld\n", pkt.size, pkt.pts);
            av_bitstream_filter_filter(h264bsfc, in_stream->codec, NULL, &pkt.data, &pkt.size, pkt.data, pkt.size, 0);
        }
        else if (pkt.stream_index == audioindex)
        {
            out_stream = ofmt_ctx_audio->streams[0];
            ofmt_ctx = ofmt_ctx_audio;
            // printf_s("Write Audio Packet. size:%d\tpts:%lld\n", pkt.size, pkt.pts);
        }
        else
            continue;

        // DTS（Decoding Time Stamp）解码时间戳
        // PTS（Presentation Time Stamp）显示时间戳
        // 转换PTS/DTS
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        pkt.stream_index = 0;

        // 写
        if (av_interleaved_write_frame(ofmt_ctx, &pkt) < 0)
        {
            break;
        }

        av_free_packet(&pkt);
        frame_index++;
    }

    av_bitstream_filter_close(h264bsfc);

    // 写文件尾部
    av_write_trailer(ofmt_ctx_video);
    av_write_trailer(ofmt_ctx_audio);

end:
    avformat_close_input(&ifmt_ctx);
    if (ofmt_ctx_video && !(ofmt_video->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx_video->pb);

    if (ofmt_ctx_audio && !(ofmt_audio->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx_audio->pb);

    avformat_free_context(ofmt_ctx_video);
    avformat_free_context(ofmt_ctx_audio);
    
    return ret;
    
}