package com.ytf.dogbox.inteflyCamera;

import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import androidx.annotation.RequiresApi;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentLinkedDeque;

/**
 * Created by ZhangHao on 2016/8/5.
 * Utility class for hardware-accelerated (MediaCodec) H.264 decoding.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
public class MediaCodecUtil {

    // Surface holder used to display decoded video, plus its width/height.
    private SurfaceHolder holder;
    private int width, height;
    // The hardware decoder instance; null until initDecoder() succeeds.
    private MediaCodec mCodec;
    // True until the decoder is initialized; reset to true by stopCodec().
    private boolean isFirst = true;
    // MIME type this utility decodes.
    private final static String MIME_TYPE = "video/avc"; // H.264 Advanced Video
    // NOTE(review): appears unused within this class — confirm before removing.
    private final static int TIME_INTERNAL = 5;

    // Output surface the decoder renders into (taken from holder, or passed directly).
    private Surface surfaceView;

    /**
     * Creates a decoder utility that renders onto the given holder's surface.
     *
     * @param holder surface holder used to display decoded video
     * @param width  surface width in pixels
     * @param height surface height in pixels
     */
    public MediaCodecUtil(SurfaceHolder holder, int width, int height) {
        this.holder = holder;
        this.width = width;
        this.height = height;
        surfaceView = this.holder.getSurface();
    }

    /**
     * Convenience constructor: derives the decode size from the holder's
     * current surface frame dimensions.
     */
    public MediaCodecUtil(SurfaceHolder holder) {
        this(holder, holder.getSurfaceFrame().width(), holder.getSurfaceFrame().height());
    }

    /**
     * Creates a decoder utility that renders directly onto the given surface.
     *
     * @param surface      output surface for decoded frames
     * @param cameraWidth  frame width in pixels
     * @param cameraHeight frame height in pixels
     */
    public MediaCodecUtil(Surface surface, int cameraWidth, int cameraHeight){
        this.surfaceView=surface;
        this.width=cameraWidth;
        this.height=cameraHeight;
    }

    /**
     * Starts the decoder. Initialization happens lazily on the first call;
     * subsequent calls are no-ops until {@link #stopCodec()} resets the state.
     */
    public void startCodec() {
        if (!isFirst) {
            return;
        }
        // First open: build and start the underlying MediaCodec.
        initDecoder();
    }


    /**
     * Creates, configures and starts the H.264 decoder.
     * <p>
     * On success {@code isFirst} is cleared so {@link #startCodec()} will not
     * re-initialize. If decoder creation fails, {@code mCodec} stays null and
     * {@link #onFrame} simply rejects frames.
     */
    private void initDecoder() {
        try {
            // Create a decoder for the required MIME type (H.264 / AVC).
            mCodec = MediaCodec.createDecoderByType(MIME_TYPE);
        } catch (IOException e) {
            // Without a codec there is nothing to configure; leave mCodec null.
            Log.e("TAG", "initDecoder: failed to create decoder for " + MIME_TYPE, e);
            return;
        }
        // Describe the incoming stream: codec type plus frame dimensions.
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
        // Bind the output surface; decoded frames are rendered there directly.
        mCodec.configure(mediaFormat, surfaceView, null, 0);
        mCodec.start();
        isFirst = false;
    }

    // Number of input buffers queued so far (diagnostic counter; never read here).
    int mCount = 0;



    /**
     * Feeds one encoded H.264 frame to the decoder and drains all decoded
     * output to the surface.
     *
     * @param buf    array holding the encoded frame
     * @param offset start of the frame data within {@code buf}
     * @param length number of encoded bytes
     * @return false when the decoder is missing or no input buffer was
     *         available; true otherwise (including after an error recovery)
     */
    public boolean onFrame(byte[] buf, int offset, int length) {
        try {
            if (mCodec == null) return false;
            // -1 = block until an input buffer is free (0 = no wait, >0 = timeout ms).
            int inputBufferIndex = mCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex < 0) {
                return false;
            }
            ByteBuffer inputBuffer = mCodec.getInputBuffer(inputBufferIndex);
            inputBuffer.clear();
            inputBuffer.put(buf, offset, length);
            // The frame was written at position 0 of the codec's buffer, so the
            // queued offset must be 0 — the caller's array offset does not apply
            // here (previously it was passed through, corrupting non-zero-offset
            // input). The presentation timestamp is expected in microseconds.
            mCodec.queueInputBuffer(inputBufferIndex, 0, length, System.nanoTime() / 1000, 0);
            mCount++;

            // Drain every frame that is already decoded; rendering to the
            // surface happens when the buffer is released with render == true.
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 100);
            while (outputBufferIndex >= 0) {
                mCodec.releaseOutputBuffer(outputBufferIndex, true);
                outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
        } catch (MediaCodec.CodecException e) {
            Log.e("TAG", "onFrame: 解码出现了解码错误", e);
            if (mCodec != null) {
                Log.e("TAG", "onFrame: 解码出现重新初始化-222");
                resetDecoder();
            }
        } catch (MediaCodec.CryptoException e) {
            Log.e("TAG", "onFrame: 解码出现了非法帧", e);
            if (mCodec != null) {
                Log.e("TAG", "onFrame: 解码出现重新初始化000");
                resetDecoder();
            }
        } catch (Exception e) {
            Log.e("TAG", "onFrame: 解码出现的其他错误", e);
            if (mCodec != null) {
                Log.e("TAG", "onFrame: 解码出现重新初始化");
                resetDecoder();
            }
        }
        return true;
    }

    /**
     * Releases the current (broken) codec and builds a replacement.
     * {@code mCodec} is cleared before re-init so a failed re-creation can
     * never leave a released codec reachable.
     */
    private void resetDecoder() {
        try {
            mCodec.stop();
        } catch (Exception ignored) {
            // stop() can throw when the codec is already in an error state;
            // release() below is what matters.
        }
        mCodec.release();
        mCodec = null;
        initDecoder();
    }

    // Number of leading decoded frames to skip before publishing to the
    // face-recognition queue (counted down in setOutByte).
    int num=2;
    // Cached codec-config bytes (presumably SPS/PPS — TODO confirm); prepended to key frames.
    byte[] mConfigByte;
    /**
     * Routes one decoded output frame to the downstream consumers: caches the
     * codec-config bytes, prepends them to key frames, and queues ordinary
     * frames for background NV21 conversion plus a synchronous YUV420P pass.
     * <p>
     * NOTE(review): {@code bufferInfo.flags} is a bitmask; the exact
     * comparisons {@code == 2} (codec-config) and {@code == 1} (key frame)
     * miss buffers carrying combined flags — confirm this is safe for the
     * streams being decoded.
     */
    private void setOutByte(ByteBuffer outputBufffer, ByteBuffer dupBuffer, ByteBuffer nv21Buffer, ByteBuffer yuvBuffer, MediaCodec.BufferInfo bufferInfo){
        // One copy of the frame per consumer path (all four buffers are
        // duplicates of the same decoded output).
        byte[] outData = new byte[bufferInfo.size];
        byte[] copyData=new byte[bufferInfo.size];
        byte[] nv21Data = new byte[bufferInfo.size];
        byte[] yuvData=new byte[bufferInfo.size];
        outputBufffer.get(outData);
        dupBuffer.get(copyData);
        nv21Buffer.get(nv21Data);
        yuvBuffer.get(yuvData);
        if (outData.length>0){
            if (bufferInfo.flags == 2) {
                // Codec-config buffer: remember it for later key frames.
                mConfigByte = new byte[bufferInfo.size];
                mConfigByte = outData;
            } else if (bufferInfo.flags == 1) {
                // Key frame: emit config bytes + frame as one array.
                byte[] keyframe = new byte[bufferInfo.size + mConfigByte.length];
                System.arraycopy(mConfigByte, 0, keyframe, 0, mConfigByte.length);
                System.arraycopy(outData, 0, keyframe, mConfigByte.length, outData.length);
//            try {
//                outputStream.write(keyframe, 0, keyframe.length);
//            } catch (IOException e) {
//                e.printStackTrace();
//            }
                iByteArray.setByteArray(keyframe,width,height);
            } else {
//            try {
//                outputStream.write(outData, 0, outData.length);
//            } catch (IOException e) {
//                e.printStackTrace();
//            }
                long start= System.currentTimeMillis();
                // Skip the first `num` ordinary frames, then queue every frame
                // for the NV21 worker and wake it up.
                if (num==0){

                    BytePicMsg bytePicMsg=new BytePicMsg(outData,nv21Data);
                    cacheLinkedDeque.add(bytePicMsg);
                    synchronized (writeLock) {
                        writeLock.notifyAll();
                    }
                }else {
                    num--;
                    if (num<0){
                        num=0;
                    }
                }


//                BytePicMsg yuvPicMsg=new BytePicMsg(copyData,yuvData);
//                cacheLinkedDeque1.add(yuvPicMsg);
//                synchronized (writeLock1) {
//                    writeLock1.notifyAll();
//                }

                // Convert to YUV420P inline; publish only when the conversion
                // finished quickly enough (<= 25 ms) to keep the pipeline live.
                long start2= System.currentTimeMillis();
                NV12ToYuv420P(copyData,yuvData,width,height);
                if (System.currentTimeMillis()-start2<=25)
                    iByteArray.setyuvArray(yuvData,width,height);

            }
        }

    }

//    private static int length=0;
//    static byte[] nv21=null;
    //TODO 这个会导致很多的gc回收,但是它本身处理不需要多少时间
    private static byte[] NV12ToNV21(byte[] data,int width,int height){

//        if (length==0 || length==data.length){
//            nv21=new byte[data.length];
//            length=data.length;
//        }
        byte[] nv21=new byte[data.length];
        int framesize=width*height;
        int i=0,j=0;
        System.arraycopy(data,0,nv21,0,framesize);
        for ( i = 0; i < framesize; i++) {
            nv21[i]=data[i];
        }
        for (j = 0; j < framesize/2; j+=2) {
            nv21[framesize+j-1]=data[j+framesize];
        }
        for (j = 0; j < framesize/2; j+=2) {
            nv21[framesize+j]=data[j+framesize-1];
        }
        return nv21;
    }

    /**
     * Swaps the interleaved chroma bytes of an NV12 frame into {@code nv21},
     * converting it to NV21. The Y plane of {@code nv21} is assumed to
     * already hold the frame's luma data (callers pass two copies of the same
     * decoded frame), so only the U/V pairs are rewritten.
     * <p>
     * Fixes an off-by-one in the original loop, which started at
     * {@code framesize - 1} — clobbering the last Y byte — and mixed stale
     * {@code nv21} bytes into the chroma plane instead of swapping the
     * source's U/V pairs.
     *
     * @param data   NV12 source, at least width*height*3/2 bytes
     * @param nv21   destination holding the same frame; chroma gets swapped
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return {@code nv21}, for convenience
     */
    public static byte[] NV12ToNV21(byte[] data, byte[] nv21, int width, int height) {
        int framesize = width * height;
        // Write each chroma pair swapped: V first, then U.
        for (int j = 0; j < framesize / 2; j += 2) {
            nv21[framesize + j] = data[framesize + j + 1];
            nv21[framesize + j + 1] = data[framesize + j];
        }
        return nv21;
    }



    /**
     * Converts an NV12 frame into a freshly allocated I420 (YUV420 planar)
     * buffer: full Y plane, then all U samples, then all V samples.
     *
     * @param nv12   source frame, width*height*3/2 bytes
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return a new I420 array of {@code nv12.length} bytes
     */
    public static byte[] NV12ToYuv420P(byte[] nv12, int width, int height) {
        int ySize = width * height;
        byte[] yuv420p = new byte[nv12.length];

        // Luma plane is laid out identically in both formats.
        System.arraycopy(nv12, 0, yuv420p, 0, ySize);

        // U samples sit at even offsets within the interleaved chroma plane.
        int out = 0;
        for (int in = 0; in < ySize / 2; in += 2) {
            yuv420p[ySize + out++] = nv12[ySize + in];
        }

        // V samples sit at odd offsets.
        out = 0;
        for (int in = 1; in < ySize / 2; in += 2) {
            yuv420p[ySize * 5 / 4 + out++] = nv12[ySize + in];
        }

        return yuv420p;
    }

    /**
     * De-interleaves the chroma plane of an NV12 frame into the given I420
     * buffer. Only the U and V planes of {@code yuv420p} are written; its Y
     * plane is assumed to already contain the frame's luma data.
     *
     * @param nv12    source frame, width*height*3/2 bytes
     * @param yuv420p destination I420 buffer of the same length, Y prefilled
     * @param width   frame width in pixels
     * @param height  frame height in pixels
     * @return {@code yuv420p}, for convenience
     */
    public byte[] NV12ToYuv420P(byte[] nv12, byte[] yuv420p, int width, int height) {
        int ySize = width * height;
        int vBase = ySize * 5 / 4;

        // Walk the interleaved chroma pairs: even offset = U, odd offset = V.
        int out = 0;
        for (int in = 0; in < ySize / 2; in += 2) {
            yuv420p[ySize + out] = nv12[ySize + in];
            yuv420p[vBase + out] = nv12[ySize + in + 1];
            out++;
        }

        return yuv420p;
    }


    /**
     * Stops and releases the decoder and resets the lazy-init flag so a later
     * {@link #startCodec()} builds a fresh codec. Safe to call repeatedly and
     * before the codec was ever started.
     */
    public void stopCodec() {
        if (mCodec == null) {
            // Never started (or already stopped); just reset the state.
            surfaceView = null;
            isFirst = true;
            return;
        }
        try {
            mCodec.stop();
            mCodec.release();
        } catch (Exception e) {
            Log.e("TAG", "stopCodec: error while releasing decoder", e);
        } finally {
            // Drop references even when stop()/release() failed, so the
            // utility can be restarted cleanly.
            surfaceView = null;
            mCodec = null;
            isFirst = true;
        }
    }

    // Callback sink for converted frame data; set via setiByteArray().
    IByteArray iByteArray;
    /** Consumer of converted frame byte arrays produced by this utility. */
    public interface IByteArray{
        // Receives an NV21 frame (see doPicWork) with the given dimensions.
        void setByteArray(byte[] flag, int width, int height);
        // Receives an I420/YUV420P frame with the given dimensions.
        void setyuvArray(byte[] yuv, int width, int height);
    }
    /** Registers the consumer that receives converted frames. */
    public void setiByteArray(IByteArray iByteArray){
        this.iByteArray=iByteArray;
    }


    // Worker thread, its wake-up lock, and the queue filled by setOutByte().
    private Thread picWorker;
    private final Object writeLock = new Object();
    private ConcurrentLinkedDeque<BytePicMsg> cacheLinkedDeque=new ConcurrentLinkedDeque<>();

    /**
     * Starts the background worker that converts queued NV12 frames to NV21
     * and hands them to {@link IByteArray#setByteArray} for face recognition.
     * Frames are skipped when conversion or the previous recognition pass was
     * too slow (frame-dropping to keep up with the stream).
     * <p>
     * NOTE(review): iByteArray is dereferenced without a null check — confirm
     * setiByteArray() is always called before frames arrive.
     */
    public void doPicWork(){

        picWorker=new Thread(new Runnable() {
            @Override
            public void run() {
                long dolongTime=0;      // Duration of the previous frame's handling; when it was too long, the next frame is skipped (every-other-frame processing).
                while (!Thread.interrupted()) {
                    // Drain everything currently queued.
                    while (!cacheLinkedDeque.isEmpty()) {
                        long start= System.currentTimeMillis();
                        BytePicMsg source=cacheLinkedDeque.poll();
                        if (source.origin.length>0){
                            NV12ToNV21(source.origin,source.output,width,height);
                            if ((System.currentTimeMillis()-start)>10 || dolongTime>50){
                                // Drop this frame: the NV12->NV21 conversion took
                                // more than 10 ms, or the previous recognition
                                // pass took more than 50 ms.

                            }else {
                                iByteArray.setByteArray(source.output,width,height);
                            }

                            dolongTime= System.currentTimeMillis()-start;

                        }

                    }
                    // Waiting for next frame
                    synchronized (writeLock) {
                        try {
                            // isEmpty() may take some time, so we set timeout to detect next frame
                            writeLock.wait(500);
                        } catch (InterruptedException ie) {
                            // Re-interrupt so the outer while(!interrupted()) loop exits.
                            picWorker.interrupt();
                        }
                    }
                }
            }
        });
        picWorker.start();
    }

    // Second worker thread (YUV420P path), its lock, and its queue.
    // The producer side (setOutByte) for this queue is currently commented out.
    private Thread picWorker1;
    private final Object writeLock1 = new Object();
    private ConcurrentLinkedDeque<BytePicMsg> cacheLinkedDeque1=new ConcurrentLinkedDeque<>();

    /**
     * Starts the background worker that converts queued NV12 frames to
     * YUV420P and hands them to {@link IByteArray#setyuvArray}.
     * <p>
     * NOTE(review): setyuvArray is called with (height, width) here, while
     * setOutByte calls it with (width, height) — confirm which order the
     * consumer expects.
     */
    public void doPicWork1(){
        picWorker1=new Thread(new Runnable() {
            @Override
            public void run() {

                while (!Thread.interrupted()) {
                    // Drain everything currently queued.
                    while (!cacheLinkedDeque1.isEmpty()) {
                        long start= System.currentTimeMillis();
                        BytePicMsg source=cacheLinkedDeque1.poll();
                        if (source.origin.length>0){
                            NV12ToYuv420P(source.origin,source.output,width,height);
                            iByteArray.setyuvArray(source.output,height,width);
                        }

                    }
                    // Waiting for next frame
                    synchronized (writeLock1) {
                        try {
                            // isEmpty() may take some time, so we set timeout to detect next frame
                            writeLock1.wait(500);
                        } catch (InterruptedException ie) {
                            // Re-interrupt so the outer while(!interrupted()) loop exits.
                            picWorker1.interrupt();
                        }
                    }
                }
            }
        });
        picWorker1.start();
    }


    /**
     * Pairs a decoded source frame with the destination buffer that receives
     * the converted pixel data on a worker thread.
     */
    class BytePicMsg{
        byte[] origin; // source frame bytes as read from the decoder
        byte[] output; // destination buffer for the converted frame

        public BytePicMsg(byte[] origin, byte[] output) {
            this.output = output;
            this.origin = origin;
        }
    }
}
