package com.byron.media.server.ffmpeg;

import com.byron.media.server.utils.FrameInfo;
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.DoublePointer;
import org.bytedeco.javacpp.IntPointer;
import org.bytedeco.javacpp.Pointer;

import static org.bytedeco.javacpp.avcodec.*;
import static org.bytedeco.javacpp.avformat.av_register_all;
import static org.bytedeco.javacpp.avformat.avformat_network_init;
import static org.bytedeco.javacpp.avutil.*;
import static org.bytedeco.javacpp.swscale.*;

/**
 * H.264 encoder built on the FFmpeg libraries (JavaCPP bindings).
 *
 * <p>Usage: call {@link #encodeInit(int, int, int, int)} once, register a
 * {@link FrameListener} via {@link #setFrameListener(FrameListener)}, then feed
 * frames through {@link #pushRGB} or {@link #pushYUV}. Each encoded H.264
 * packet is delivered to the listener as a {@link FrameInfo}. Call
 * {@link #encoderRelease()} to free all native resources.
 *
 * <p>NOTE(review): not thread-safe — all methods must be invoked from a single
 * thread, and the byte[] handed to the listener is reused between calls.
 */
public class FfmpegUtils {

    private AVCodecContext pOutVideoCodecCtx;   // H.264 encoder context
    private AVPacket pOutVideoPacket;           // reusable packet for encoder output
    private AVFrame pOutVideoFrame;             // YUV420P frame handed to the encoder
    private AVFrame pRgbVideoFrame;             // lazily created BGRA staging frame
    private AVFrame pYuvVideoFrame;             // lazily created YUV staging frame
    private SwsContext pRgbSwsContext;          // BGRA -> YUV420P converter
    private SwsContext pYuvSwsContext;          // input YUV -> YUV420P converter
    private FrameListener frameListener;        // sink for encoded packets
    private byte[] h264OutData;                 // scratch copy buffer, grown on demand
    private long nextPts;                       // monotonic pts for frames sent to the encoder

    /**
     * Initializes the H.264 encoder.
     *
     * @param width     output width in pixels
     * @param height    output height in pixels
     * @param dpi       bit-rate factor: bit_rate = width * height * dpi
     * @param frameRate currently unused; the time base is fixed at 1/10 — TODO wire up
     * @return 0 on success; -1 no encoder, -2 context alloc failed,
     *         -3 avcodec_open2 failed, -5 frame buffer alloc failed
     */
    public int encodeInit(int width, int height, int dpi, int frameRate) {
        av_register_all();
        avformat_network_init();

        AVCodec pOutVideoCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
        if (pOutVideoCodec == null) {
            return -1;
        }

        pOutVideoPacket = new AVPacket();
        av_init_packet(pOutVideoPacket);
        h264OutData = new byte[1024 * 1024 * 2];
        nextPts = 0;

        pOutVideoCodecCtx = avcodec_alloc_context3(pOutVideoCodec);
        if (pOutVideoCodecCtx == null) {
            return -2;
        }

        initOutCodecContext(pOutVideoCodecCtx, width, height, dpi);

        // x264 private options: fastest preset, and zerolatency so packets are
        // produced immediately instead of being buffered inside the encoder.
        av_opt_set(pOutVideoCodecCtx.priv_data(), "preset", "superfast", 0);
        av_opt_set(pOutVideoCodecCtx.priv_data(), "tune", "zerolatency", 0);

        int ret = avcodec_open2(pOutVideoCodecCtx, pOutVideoCodec, (AVDictionary) null);
        if (ret < 0) {
            // don't leak the context when the encoder fails to open
            avcodec_free_context(pOutVideoCodecCtx);
            pOutVideoCodecCtx = null;
            return -3;
        }

        // Allocate the frame that is handed to the encoder (encoder pix_fmt, encoder size).
        pOutVideoFrame = av_frame_alloc();
        pOutVideoFrame
                .format(pOutVideoCodecCtx.pix_fmt())
                .width(width)
                .height(height);
        ret = av_image_alloc(
                pOutVideoFrame.data(),
                pOutVideoFrame.linesize(),
                pOutVideoFrame.width(),
                pOutVideoFrame.height(),
                pOutVideoFrame.format(),
                16);
        if (ret < 0) {
            av_frame_free(pOutVideoFrame);
            pOutVideoFrame = null;
            return -5;
        }
        return 0;
    }

    /**
     * Configures the encoder context: bit rate scales with width * height * dpi,
     * fixed 1/10 time base, YUV420P pixel format, GOP of 20 and no B-frames
     * (low latency).
     */
    private void initOutCodecContext(AVCodecContext codecContext, int width, int height, int dpi) {
        codecContext
                // target bit rate; larger values produce larger output
                .bit_rate(width * height * dpi)
                .rc_max_rate(width * height * dpi)
                .rc_min_rate(width * height * dpi)

                // frame size in pixels
                .width(width)
                .height(height)

                // time base as a fraction — TODO: derive from the frameRate argument
                .time_base(new AVRational().num(1).den(10))

                // color space of frames fed to the encoder
                .pix_fmt(AV_PIX_FMT_YUV420P)

                // one I-frame every 20 frames; fewer I-frames -> smaller stream
                .gop_size(20)

                // 0 = no B-frames between reference frames (keeps latency low)
                .max_b_frames(0)

                // quantizer bounds
                .qmin(10)
                .qmax(50)

                // damping of quantizer fluctuation within [qmin, qmax]; 0 = none
                .qblur(0)

                // motion-estimation pre-pass comparison function; higher = slower encode
                .me_pre_cmp(2)

                // DCT algorithm selection (0-7, CPU-instruction-set dependent)
                .dct_algo(0)

                // masking strength for very bright / very dark scenes; 0 = disabled
                .lumi_masking(0)
                .dark_masking(0);
    }

    /**
     * Frees every native resource owned by this encoder. Safe to call multiple
     * times; released fields are nulled.
     *
     * @return always 0
     */
    public int encoderRelease() {
        if (pOutVideoCodecCtx != null) {
            avcodec_free_context(pOutVideoCodecCtx);
            pOutVideoCodecCtx = null;
        }

        pOutVideoFrame = freeImageFrame(pOutVideoFrame);
        pRgbVideoFrame = freeImageFrame(pRgbVideoFrame);
        // BUGFIX: the YUV staging frame and the YUV sws context were never freed
        pYuvVideoFrame = freeImageFrame(pYuvVideoFrame);

        if (pRgbSwsContext != null) {
            sws_freeContext(pRgbSwsContext);
            pRgbSwsContext = null;
        }
        if (pYuvSwsContext != null) {
            sws_freeContext(pYuvSwsContext);
            pYuvSwsContext = null;
        }
        return 0;
    }

    /**
     * Releases a frame whose pixel buffer came from av_image_alloc.
     * av_frame_free alone does NOT free such buffers, so the data pointer is
     * av_freep'd first.
     *
     * @return null, so callers can write {@code field = freeImageFrame(field);}
     */
    private AVFrame freeImageFrame(AVFrame frame) {
        if (frame != null) {
            av_freep(frame.data());   // frees data[0] allocated by av_image_alloc
            av_frame_free(frame);
        }
        return null;
    }

    /**
     * Converts one BGRA frame to YUV420P and encodes it.
     *
     * @param rgb    packed BGRA pixels, one int per pixel, width * height entries
     * @param width  frame width; must match the width given to encodeInit
     * @param height frame height; must match the height given to encodeInit
     * @param pts    currently informational only: the packet's own timestamps are
     *               forwarded to the listener (kept for interface compatibility)
     * @param dts    currently informational only, see {@code pts}
     * @return 0 on success (including "encoder buffered the frame"), -5 on alloc failure
     */
    public int pushRGB(int[] rgb, int width, int height, long pts, long dts) {
        if (pRgbVideoFrame == null) {
            pRgbVideoFrame = av_frame_alloc();
            pRgbVideoFrame
                    .format(AV_PIX_FMT_BGRA)
                    .width(width)
                    .height(height);
            int ret = av_image_alloc(
                    pRgbVideoFrame.data(),
                    pRgbVideoFrame.linesize(),
                    pRgbVideoFrame.width(),
                    pRgbVideoFrame.height(),
                    pRgbVideoFrame.format(),
                    16);
            if (ret < 0) {
                return -5;
            }
        }

        if (pRgbSwsContext == null) {
            // BUGFIX: the source side of the conversion is the BGRA staging frame;
            // the old code used the output frame's size for both sides.
            pRgbSwsContext = sws_getContext(
                    pRgbVideoFrame.width(), pRgbVideoFrame.height(), AV_PIX_FMT_BGRA,
                    pOutVideoFrame.width(), pOutVideoFrame.height(), AV_PIX_FMT_YUV420P,
                    SWS_SINC,
                    null,
                    null,
                    (DoublePointer) null);
        }

        // copy the caller's pixels into the native staging frame
        IntPointer src = new IntPointer(rgb);
        try {
            Pointer.memcpy(pRgbVideoFrame.data(0).position(0), src.position(0), width * height * 4);
        } finally {
            src.close();   // release the temporary native copy of the Java array
        }

        sws_scale(pRgbSwsContext,
                pRgbVideoFrame.data(),
                pRgbVideoFrame.linesize(),
                0,
                pRgbVideoFrame.height(),
                pOutVideoFrame.data(),
                pOutVideoFrame.linesize());

        // original behavior: a pending packet (EAGAIN) is not an error here
        encodeAndDeliver(0, 0, 0, true);
        return 0;
    }

    /**
     * Converts one planar YUV frame to the encoder's format and encodes it.
     *
     * <p>NOTE(review): the planes are copied with one bulk put per plane, which
     * assumes the staging frame's linesize equals each plane's row width — with
     * the 16-byte-aligned allocation this only holds when the widths are
     * alignment multiples; confirm for odd sizes.
     *
     * @param format   FFmpeg pixel format of the input planes (AV_PIX_FMT_*)
     * @param pts      presentation timestamp forwarded verbatim to the listener
     * @param dts      decoding timestamp forwarded verbatim to the listener
     * @param duration duration forwarded verbatim to the listener
     * @return 0 on success, -1 if no packet was produced, -5 on alloc failure
     */
    public int pushYUV(byte[] y, byte[] u, byte[] v, int format, int width, int height, long pts, long dts, long duration) {
        if (pYuvVideoFrame == null) {
            pYuvVideoFrame = av_frame_alloc();
            pYuvVideoFrame
                    .format(format)
                    .width(width)
                    .height(height);
            int ret = av_image_alloc(
                    pYuvVideoFrame.data(),
                    pYuvVideoFrame.linesize(),
                    pYuvVideoFrame.width(),
                    pYuvVideoFrame.height(),
                    pYuvVideoFrame.format(),
                    16);
            if (ret < 0) {
                return -5;
            }
        }

        if (pYuvSwsContext == null) {
            // BUGFIX: use the caller-supplied input format instead of assuming YUV420P
            pYuvSwsContext = sws_getContext(
                    pYuvVideoFrame.width(), pYuvVideoFrame.height(), pYuvVideoFrame.format(),
                    pOutVideoFrame.width(), pOutVideoFrame.height(), AV_PIX_FMT_YUV420P,
                    SWS_SINC,
                    null,
                    null,
                    (DoublePointer) null);
        }

        // copy the three planes into the staging frame
        pYuvVideoFrame.data(0).position(0).put(y);
        pYuvVideoFrame.data(1).position(0).put(u);
        pYuvVideoFrame.data(2).position(0).put(v);

        sws_scale(pYuvSwsContext,
                pYuvVideoFrame.data(),
                pYuvVideoFrame.linesize(),
                0,
                pYuvVideoFrame.height(),
                pOutVideoFrame.data(),
                pOutVideoFrame.linesize());

        return encodeAndDeliver(pts, dts, duration, false) < 0 ? -1 : 0;
    }

    /**
     * Sends the already-filled pOutVideoFrame to the encoder and, if a packet
     * comes out, copies its payload and notifies the listener.
     *
     * @param pts/dts/duration values reported to the listener when
     *        {@code usePacketTimes} is false; otherwise the packet's own
     *        timestamps are used
     * @return 0 when a packet was delivered, negative FFmpeg error otherwise
     *         (typically EAGAIN: the encoder wants more input first)
     */
    private int encodeAndDeliver(long pts, long dts, long duration, boolean usePacketTimes) {
        // BUGFIX: frames from av_frame_alloc start at AV_NOPTS_VALUE and the old
        // code incremented pts only AFTER sending — stamp an explicit,
        // monotonically increasing pts before avcodec_send_frame.
        pOutVideoFrame.pts(nextPts++);
        int ret = avcodec_send_frame(pOutVideoCodecCtx, pOutVideoFrame);
        if (ret < 0) {
            return ret;
        }

        ret = avcodec_receive_packet(pOutVideoCodecCtx, pOutVideoPacket);
        if (ret < 0) {
            return ret;
        }

        try {
            int size = pOutVideoPacket.size();
            if (h264OutData == null || h264OutData.length < size) {
                // BUGFIX: grow the scratch buffer instead of overrunning the fixed 2 MB one
                h264OutData = new byte[size];
            }
            pOutVideoPacket.data().position(0).get(h264OutData, 0, size);

            if (frameListener != null) {
                if (usePacketTimes) {
                    frameListener.onFrame(new FrameInfo(h264OutData, 0, size,
                            pOutVideoPacket.pts(), pOutVideoPacket.dts(), true,
                            pOutVideoPacket.duration()));
                } else {
                    frameListener.onFrame(new FrameInfo(h264OutData, 0, size,
                            pts, dts, true, duration));
                }
            }
        } finally {
            // BUGFIX: the packet's payload was never released -> per-frame leak
            av_packet_unref(pOutVideoPacket);
        }
        return 0;
    }

    /** Registers the sink that receives encoded H.264 packets. */
    public void setFrameListener(FrameListener frameListener) {
        this.frameListener = frameListener;
    }
}
