package com.byron.media.server.ffmpeg;

import lombok.extern.slf4j.Slf4j;
import org.bytedeco.javacpp.avcodec;
import org.bytedeco.javacpp.avformat;

import java.io.File;
import java.io.IOException;

import static org.bytedeco.javacpp.avcodec.*;
import static org.bytedeco.javacpp.avformat.*;
import static org.bytedeco.javacpp.avutil.*;

@Slf4j
public class Mp4Demo {

    // Frame counter used to derive the presentation timestamp of each written frame.
    private int ptsInc = 0;
    // 1 while still waiting for the first key frame, 0 once one has been written,
    // -1 after the file is closed. (FIX: removed a stray duplicate semicolon.)
    private int waitkey = 1;
    // Nominal output frame rate in frames per second, used for PTS computation.
    // FIX: a constant — made static final instead of a mutable instance field.
    private static final int STREAM_FRAME_RATE = 25;
    // Muxer context of the currently open output file; null when no file is open.
    private AVFormatContext m_pOc;
    // Index of the video stream inside m_pOc; set in add_stream, -1 after CloseMp4().
    private int vi;

    // Output frame geometry and the bitrate multiplier (target bitrate = width * height * dpi).
    private int width = 1280;
    private int height = 720;
    private int dpi = 1;
    /**
     * Classifies a single H.264 NAL unit by its header byte: returns true for
     * the NAL types the muxer treats as key-frame material — an IDR slice
     * (type 5) or the SPS/PPS parameter sets (types 7/8) that precede one.
     *
     * @param data   first byte of the NAL unit (the NAL header)
     * @param length NAL unit length in bytes; kept for API compatibility, not inspected
     * @return true for SPS, PPS and IDR NAL units, false otherwise
     */
    public boolean isIdrFrame2(byte data, int length){
        int nalType = data & 0x1f;
        // 5 = IDR slice, 7 = SPS, 8 = PPS; anything else (e.g. 1 = non-IDR slice) is not.
        return nalType == 5 || nalType == 7 || nalType == 8;
    }


    /**
     * Scans an Annex-B H.264 buffer (NAL units delimited by 00 00 01 start
     * codes) and reports whether any contained NAL unit is classified by
     * {@link #isIdrFrame2(byte, int)} as key-frame material (SPS/PPS/IDR).
     *
     * @param buf    Annex-B byte stream
     * @param length number of valid bytes in {@code buf}
     * @return true if the buffer contains an SPS, PPS or IDR NAL unit
     */
    boolean isIdrFrame1(byte[] buf, int length){
        // Main purpose: parse the SPS/PPS NAL units that precede the IDR slice.
//    static bool found = false;
//    if(found){ return true;}

        // 'last' is the index of the first byte after the most recent start code,
        // i.e. where the current NAL unit's header byte lives; 0 means none seen yet.
        int last = 0;
        for (int i = 2; i <= length; ++i){
            if (i == length) {
                // End of buffer: classify the trailing NAL unit, if we ever saw one.
                if (last > 0) {
                    boolean ret = isIdrFrame2(buf[last] ,i - last);
                    if (ret) {
                        //found = true;
                        return true;
                    }
                }
            } else if (buf[i - 2]== 0x00 && buf[i - 1]== 0x00 && buf[i] == 0x01) {
                // Found the next 00 00 01 start code at [i-2..i]; classify the NAL
                // unit that ran from 'last' up to just before this start code.
                if (last > 0) {
                    int size = i - last - 3;
                    // NOTE(review): this tries to distinguish 3- vs 4-byte start codes,
                    // but 'buf[i - 3] > 0' is a SIGNED byte compare and misclassifies
                    // trailing bytes >= 0x80. Harmless today because isIdrFrame2
                    // ignores its length argument — confirm before relying on 'size'.
                    if (buf[i - 3] > 0) ++size;
                    boolean  ret = isIdrFrame2(buf[last] ,size);
                    if (ret) {
                        //found = true;
                        return true;
                    }
                }
                // The next NAL unit begins right after this start code.
                last = i + 1;
            }
        }
        return false;

    }


    /**
     * Adds a new stream to the output context and pre-configures its codec
     * context for the requested codec id. The chosen encoder is returned to
     * the caller through the one-element {@code codec} array.
     *
     * @param oc       output format context the stream is added to
     * @param codec    single-element out-parameter receiving the found encoder
     * @param codec_id codec id requested by the container format
     * @return the newly created stream, or null if no encoder was found or
     *         the stream could not be allocated
     */
    private avformat.AVStream add_stream(avformat.AVFormatContext oc,
        avcodec.AVCodec[] codec, int codec_id) {
        avcodec.AVCodecContext c;
        avformat.AVStream st;
        /* find the encoder */
        codec[0] = avcodec_find_encoder(codec_id);
        if (codec[0] == null)
        {
            log.info(String.format("could not find encoder for '%s' \n", avcodec_get_name(codec_id)));
            return null;
        }
        st = avformat_new_stream(oc, codec[0]);
        if (st == null) {
            log.info("could not allocate stream \n");
            return null;
        }
        st.id(oc.nb_streams() - 1);
        c = st.codec();
        // Remember the video stream index so WriteVideo can find it later.
        vi = st.index();
        switch (codec[0].type())
        {
            case AVMEDIA_TYPE_AUDIO:
                log.info("AVMEDIA_TYPE_AUDIO\n");
                // FIX: the ternary was inverted — the old code dereferenced
                // sample_fmts() exactly when it was NULL. Use the encoder's first
                // supported format when the list exists, otherwise fall back to FLTP.
                c.sample_fmt(codec[0].sample_fmts().isNull() ? AV_SAMPLE_FMT_FLTP : codec[0].sample_fmts().get(0));
                c.bit_rate(64000);
                c.sample_rate(44100);
                c.channels(2);
                break;
            case AVMEDIA_TYPE_VIDEO:
                log.info("AVMEDIA_TYPE_VIDEO\n");
                c.codec_id(AV_CODEC_ID_H264);
                initOutCodecContext(c, width, height, dpi);
                break;
            default:
                break;
        }
        /* Some formats want stream headers to be separate. */
        int ret = oc.oformat().flags() & AVFMT_GLOBALHEADER;
        if (ret > 0) {
            c.flags(c.flags() | AV_CODEC_FLAG_GLOBAL_HEADER);
        }
        return st;
    }

    /**
     * Applies the H.264 encoding parameters — bitrate, frame geometry,
     * timebase, GOP layout and quantiser limits — to the given codec context.
     *
     * @param codecContext codec context to configure
     * @param width        frame width in pixels
     * @param height       frame height in pixels
     * @param dpi          bitrate multiplier: target bitrate is width * height * dpi
     */
    private void initOutCodecContext(AVCodecContext codecContext, int width, int height, int dpi){
        // Target sampling bitrate; the higher it is, the larger the output file.
        int bitRate = width * height * dpi;
        codecContext.bit_rate(bitRate);
        codecContext.rc_max_rate(bitRate);
        codecContext.rc_min_rate(bitRate);

        // Encoded frame size, in pixels.
        codecContext.width(width);
        codecContext.height(height);

        // Timebase expressed as a fraction: 10 ticks per second.
        codecContext.time_base(new AVRational().num(1).den(10));

        // Pixel format, i.e. the colour space used to represent each pixel.
        codecContext.pix_fmt(AV_PIX_FMT_YUV420P);

        // One I-frame every 20 frames; fewer I-frames means a smaller file.
        codecContext.gop_size(20);

        // Maximum number of B-frames allowed between two non-B frames;
        // 0 disables B-frames entirely (more B-frames would shrink the output).
        codecContext.max_b_frames(0);

        // Quantiser bounds.
        codecContext.qmin(10);
        codecContext.qmax(50);

        // The quantiser q floats between qmin and qmax; qblur controls how much
        // that variation is smoothed (0.0-1.0, 0 = no damping).
        codecContext.qblur(0);

        // Motion-estimation pre-pass comparison effort; larger values encode slower.
        codecContext.me_pre_cmp(2);

        // DCT algorithm selection (0-7), variants optimised for different CPU
        // instruction sets.
        codecContext.dct_algo(0);

        // Masking strength for very bright / very dark scenes; 0 disables it.
        codecContext.lumi_masking(0);
        codecContext.dark_masking(0);
    }


    /**
     * Opens the encoder attached to the given stream's codec context.
     * On failure the error is logged and the method returns normally,
     * preserving the caller's existing best-effort behaviour.
     *
     * @param oc    output format context (unused here, kept for signature compatibility)
     * @param codec encoder previously found by add_stream
     * @param st    stream whose codec context should be opened
     */
    void open_video(avformat.AVFormatContext oc, AVCodec codec, avformat.AVStream st) {
        AVCodecContext c = st.codec();
        /* open the codec */
        int ret = avcodec_open2(c, codec, (AVDictionary) null);
        if (ret < 0)
        {
            // FIX: include the FFmpeg error code so the failure is diagnosable.
            log.info("could not open video codec, error code {}", ret);
        }
    }

    /**
     * Creates the MP4 container for {@code filename}: allocates the muxer
     * context, adds and opens the H.264 video stream, opens the output file
     * and writes the container header. An existing file is replaced.
     *
     * @param filename output file path
     * @return 0 on success, -1 on failure
     */
    public int CreateMp4(String filename) {

        // FIX: avio_open creates the file itself, so only remove a stale one;
        // the old code also created an empty file and printStackTrace()'d on error.
        File file = new File(filename);
        if (file.exists() && !file.delete()) {
            log.warn("could not delete existing file {}", filename);
        }

        int ret; // 0 on success, negative on failure
        String pszFileName = "file://" + filename;
        AVOutputFormat fmt;
        AVCodec video_codec = null;
        AVStream m_pVideoSt = null;
        av_register_all();
        m_pOc = new AVFormatContext();
        ret = avformat_alloc_output_context2(m_pOc, null, null, filename);
        if (ret < 0) {
            log.info("Could not deduce output format from file extension: using MPEG. \n");
            ret = avformat_alloc_output_context2(m_pOc, null, "mpeg", filename);
        }
        if (ret < 0) {
            return -1;
        }
        fmt = m_pOc.oformat();
        if (fmt.video_codec() != AV_CODEC_ID_NONE) {
            log.info("add_stream");
            // add_stream hands back the chosen encoder through this one-element array.
            AVCodec[] codecReference = new AVCodec[1];
            m_pVideoSt = add_stream(m_pOc, codecReference, fmt.video_codec());
            video_codec = codecReference[0];
        }
        if (m_pVideoSt != null) {
            log.info("open_video");
            open_video(m_pOc, video_codec, m_pVideoSt);
        }
        log.info("==========Output Information==========");
        av_dump_format(m_pOc, 0, filename, 1);
        log.info("======================================");
        /* open the output file, if needed */
        if ((fmt.flags() & AVFMT_NOFILE) == 0) {
            // FIX: avio_open must be given a context to fill in, which is then
            // attached to the muxer; the old code passed the still-unset m_pOc.pb().
            AVIOContext pb = new AVIOContext(null);
            ret = avio_open(pb, filename, AVIO_FLAG_WRITE);
            if (ret < 0)  {
                log.info(String.format("could not open %s", pszFileName));
                return -1;
            }
            m_pOc.pb(pb);
        }
        /* Write the stream header, if any */
        ret = avformat_write_header(m_pOc, (AVDictionary) null);
        if (ret < 0)
        {
            log.info("Error occurred when opening output file");
            return -1;
        }
        return 0;
    }


    /**
     * Writes one H.264 frame (Annex-B byte stream) to the open container.
     * Frames arriving before the first key frame are silently dropped so
     * playback can start on a decodable frame.
     *
     * @param data Annex-B encoded frame
     * @param nLen number of valid bytes in {@code data}
     */
    public void WriteVideo(byte[] data, int nLen) {
        int ret;
        if (0 > vi)
        {
            // FIX: the old code logged and then indexed streams() with a
            // negative index anyway; bail out instead.
            log.info("vi less than 0");
            return;
        }
        AVStream pst = m_pOc.streams(vi);
        AVCodecContext c = pst.codec();
        AVPacket pkt = new AVPacket();
        av_init_packet(pkt);
        // FIX: av_init_packet leaves pkt.data() NULL; the old code wrote into it
        // directly. Allocate a packet buffer before copying the frame in.
        ret = av_new_packet(pkt, nLen);
        if (ret < 0) {
            log.info("cannot allocate packet");
            return;
        }
        pkt.data().put(data, 0, nLen);
        pkt.size(nLen);
        boolean isI = isIdrFrame1(data, nLen);
        pkt.flags(pkt.flags() | (isI ? AV_PKT_FLAG_KEY : 0));
        pkt.stream_index(pst.index());
        // Wait for a key frame before writing anything.
        if (waitkey > 0) {
            if ((pkt.flags() & AV_PKT_FLAG_KEY) == 0) {
                av_packet_unref(pkt);
                return;
            }
            waitkey = 0;
        }
        // FIX: pts was assigned twice (the first write was dead) and ptsInc
        // advanced twice per call; advance one codec-timebase tick per frame
        // and rescale it into the stream timebase.
        pkt.pts(av_rescale_q(ptsInc++, c.time_base(), pst.time_base()));
        // max_b_frames is 0, so decode order equals presentation order.
        // (The old code "rescaled" an uninitialised dts between identical timebases.)
        pkt.dts(pkt.pts());
        pkt.duration(av_rescale_q(1, c.time_base(), pst.time_base()));
        pkt.pos(-1);
        log.info(String.format("pkt.size=%d",pkt.size()));
        ret = av_interleaved_write_frame(m_pOc, pkt);
        if (ret < 0)
        {
            log.info("cannot write frame");
        }
        // FIX: release the packet buffer allocated by av_new_packet
        // (safe even after a successful write, which unrefs the packet).
        av_packet_unref(pkt);
    }

    /**
     * Finalises and closes the output file: writes the container trailer,
     * closes the AVIO handle (when the format actually owns a file) and frees
     * the muxer context. Safe to call when no file was ever opened.
     */
    public void CloseMp4() {
        waitkey = -1;
        vi = -1;
        // FIX: the old code read m_pOc.oformat() outside the null guard,
        // throwing NPE whenever CloseMp4 was called without an open file.
        if (m_pOc != null) {
            av_write_trailer(m_pOc);
            if ((m_pOc.oformat().flags() & AVFMT_NOFILE) == 0) {
                avio_close(m_pOc.pb());
            }
            avformat_free_context(m_pOc);
            m_pOc = null;
        }
    }

}
