package com.mxchip.livestarmobile.mobile.util.Camera;

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;

import androidx.annotation.RequiresApi;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.concurrent.LinkedBlockingQueue;


public class AvcEncoder {
    private final static String TAG = "MeidaCodec";

    // Currently unused (both dequeue calls poll with timeout 0); kept for compatibility.
    private int TIMEOUT_USEC = 1000;

    private MediaCodec mediaCodec;
    int m_width;
    int m_height;
    int m_framerate;
    public byte[] configbyte;
    // Cached codec-config bytes (SPS/PPS). Prepended to every key frame so the
    // stream stays decodable when a receiver joins mid-stream.
    private byte[] information;
    public static LinkedBlockingQueue<byte[]> YUVQueue = new LinkedBlockingQueue<>(100);

    /** Creates an encoder with a default I-frame interval of 10 seconds. */
    public AvcEncoder(int width, int height, int framerate, int bitrate) {
        this(width, height, framerate, bitrate, 10);
    }

    /**
     * Creates, configures and starts an H.264 (AVC) encoder.
     *
     * @param width          frame width in pixels
     * @param height         frame height in pixels
     * @param framerate      nominal frame rate in fps
     * @param bitrate        target bit rate in bits/second (CBR)
     * @param iFrameInterval key-frame interval in seconds
     * @throws RuntimeException if the codec cannot be created, configured or started
     */
    @SuppressLint("NewApi")
    public AvcEncoder(int width, int height, int framerate, int bitrate, int iFrameInterval) {
        m_width = width;
        m_height = height;
        m_framerate = framerate;

        // 21 == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar (NV12).
        // Hard-coded instead of calling getSupportColorFormat();
        // NOTE(review): confirm all target devices accept NV12 input.
        int color_formate = 21;

        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, color_formate);
        // One full NV12/NV21 frame is width * height * 3 / 2 bytes.
        mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, width * height * 3 / 2);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        mediaFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval);
        // Baseline/Level 1 keeps the stream decodable by the weakest peers.
        mediaFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
        mediaFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel1);
        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
            mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
        } catch (Exception e) {
            // Fix: the original called System.exit(0) here, silently killing the
            // whole app with a *success* status on codec failure. Surface the
            // error to the caller instead.
            Log.e(TAG, "Failed to create/configure AVC encoder", e);
            throw new RuntimeException("Failed to create/configure AVC encoder", e);
        }
    }

    /**
     * Alternative fixed 1280x720 setup. Currently unused (never called);
     * kept as-is pending removal or wiring up.
     */
    @RequiresApi(api = Build.VERSION_CODES.KITKAT)
    private void init() {
        final String MINE = MediaFormat.MIMETYPE_VIDEO_AVC;
        MediaFormat format;
        try {
            mediaCodec = MediaCodec.createEncoderByType(MINE);
            format = MediaFormat.createVideoFormat(MINE, 1280, 720);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 1200000);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 0);

            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
            changeBitRate(1024);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Scans installed codecs for an AVC encoder and returns the first YUV420
     * color format it supports, or -1 if no suitable codec/format is found.
     */
    private int getSupportColorFormat() {
        int numCodecs = MediaCodecList.getCodecCount();
        MediaCodecInfo codecInfo = null;
        for (int i = 0; i < numCodecs && codecInfo == null; i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (!info.isEncoder()) {
                continue;
            }
            for (String type : info.getSupportedTypes()) {
                if (type.equals("video/avc")) {
                    codecInfo = info;
                    break;
                }
            }
        }

        // Fix: the original dereferenced codecInfo unconditionally and would
        // throw NullPointerException on a device with no AVC encoder.
        if (codecInfo == null) {
            Log.e("AvcEncoder", "No encoder supporting video/avc found");
            return -1;
        }
        Log.d("AvcEncoder", "Found " + codecInfo.getName() + " supporting " + "video/avc");

        // Pick the first color format we know how to feed.
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
        Log.d("AvcEncoder",
                "length-" + capabilities.colorFormats.length + "==" + Arrays.toString(capabilities.colorFormats));

        for (int colorFormat : capabilities.colorFormats) {
            switch (colorFormat) {
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                    Log.d("AvcEncoder", "supported color format::" + colorFormat);
                    return colorFormat;
                default:
                    Log.e("AvcEncoder", "unsupported color format " + colorFormat);
                    break;
            }
        }
        return -1;
    }

    EcodeH264Callback mH264Callbacll;
    EcodeH264Callback mH264Callbacll2;

    /** Registers the sink that receives encoded H.264 access units. */
    public void setEcodeH264Callback(EcodeH264Callback h264Callback) {
        this.mH264Callbacll = h264Callback;
    }

    /** Registers a secondary sink. NOTE(review): mH264Callbacll2 is never invoked here. */
    public void setEcodeH264Callback2(EcodeH264Callback h264Callback) {
        this.mH264Callbacll2 = h264Callback;
    }

    /** Stops and releases the codec; safe to call more than once. */
    @SuppressLint("NewApi")
    private void StopEncoder() {
        if (mediaCodec != null)
            try {
                mediaCodec.stop();
                mediaCodec.release();
                mediaCodec = null;
            } catch (Exception e) {
                e.printStackTrace();
            }
    }

    public boolean isRuning = false;

    /** Signals the encode loop to stop and tears down the codec. */
    public void StopThread() {
        isRuning = false;
        StopEncoder();
    }

    volatile long startMs;
    volatile long endtime;
    int index = 0;

    /**
     * Feeds one NV21 camera frame to the encoder and drains every available
     * output buffer, forwarding encoded data to {@link #setEcodeH264Callback}.
     * Key frames are emitted with the cached SPS/PPS prepended.
     *
     * NOTE: the chroma swap below mutates {@code byteBuffer} in place.
     *
     * @param byteBuffer one NV21 frame of m_width x m_height (size w*h*3/2)
     * @param isSharing  passed through unchanged to the callback
     */
    public void encodeFrame(byte[] byteBuffer, boolean isSharing) {
        try {
            startMs = System.currentTimeMillis();
            // Swap each interleaved chroma pair in place: NV21 (VU) -> NV12 (UV),
            // the layout requested via COLOR_FormatYUV420SemiPlanar.
            int framesize = m_width * m_height;
            byte nv;
            for (int i = 0; i < framesize / 2; i += 2) {
                nv = byteBuffer[framesize + i];
                byteBuffer[framesize + i] = byteBuffer[framesize + i + 1];
                byteBuffer[framesize + i + 1] = nv;
            }
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(0);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(byteBuffer, 0, byteBuffer.length);
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, byteBuffer.length, System.nanoTime() / 1000, 0);
            }
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            while (outputBufferIndex >= 0) {
                byte[] outData;

                // Fix: BufferInfo.flags is a bitmask; the original compared with ==,
                // which silently misclassifies buffers carrying combined flags.
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // Codec-config buffer: cache the raw SPS/PPS bytes.
                    outData = new byte[bufferInfo.size];
                    outputBuffers[outputBufferIndex].get(outData);
                    information = outData;

                    ByteBuffer spsByteBuffer = mediaCodec.getOutputFormat().getByteBuffer("csd-0");
                    byte[] sps = new byte[spsByteBuffer.remaining()];
                    spsByteBuffer.get(sps, 0, sps.length);

                    ByteBuffer ppsByteBuffer = mediaCodec.getOutputFormat().getByteBuffer("csd-1");
                    byte[] pps = new byte[ppsByteBuffer.remaining()];
                    ppsByteBuffer.get(pps, 0, pps.length);

                    Log.d(TAG, "sps:" + byteToHex(sps));
                    Log.d(TAG, "pps:" + byteToHex(pps));
                    Log.d(TAG, "encodeFrame:information " + Arrays.toString(information));
                    if (spsAndPpsDataCa != null) {
                        spsAndPpsDataCa.onDataCallback(sps, pps);
                    }
                } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                    // Key frame: prepend cached SPS/PPS (when available) so decoding
                    // can start from this frame.
                    if (information != null) {
                        outData = new byte[bufferInfo.size + information.length];
                        System.arraycopy(information, 0, outData, 0, information.length);
                        outputBuffers[outputBufferIndex].get(outData, information.length, bufferInfo.size);
                    } else {
                        // Fix: original NPE'd if a key frame arrived before the
                        // codec-config buffer was seen.
                        outData = new byte[bufferInfo.size];
                        outputBuffers[outputBufferIndex].get(outData);
                    }
                    Log.d(TAG, "encodeFrame:KeyFrame" + outData.length);
                    mH264Callbacll.getH264Data(outData, 0, outData.length, isSharing, true);
                    index = 0;
                } else {
                    // Ordinary (P) frame.
                    outData = new byte[bufferInfo.size];
                    outputBuffers[outputBufferIndex].get(outData);
                    Log.d(TAG, "encodeFrame:NormalFrame" + outData.length + "index:" + index++);
                    mH264Callbacll.getH264Data(outData, 0, outData.length, isSharing, false);
                }
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }
            endtime = System.currentTimeMillis();
        } catch (Exception t) {
            t.printStackTrace();
        }
    }

    /**
     * Hex-encodes a byte array, two lowercase digits per byte.
     *
     * Fix: the original passed the sign-extended byte straight to
     * Integer.toHexString, so negative bytes produced 8-character strings
     * such as "ffffffb4" instead of "b4".
     */
    public static String byteToHex(byte[] bytes) {
        StringBuilder stringBuffer = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            String hex = Integer.toHexString(b & 0xff);
            if (hex.length() == 1) {
                stringBuffer.append('0');
            }
            stringBuffer.append(hex);
        }
        return stringBuffer.toString();
    }

    /**
     * Repacks YV12 (Y + V plane + U plane) into I420 (Y + U plane + V plane).
     * Uses System.arraycopy instead of the original element-by-element loops.
     *
     * @return i420bytes, for chaining
     */
    public byte[] swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height) {
        int ySize = width * height;
        // Same operand order as the original expression (width / 2 * height / 2)
        // to preserve exact integer-division behavior.
        int cSize = width / 2 * height / 2;
        System.arraycopy(yv12bytes, 0, i420bytes, 0, ySize);                     // Y plane
        System.arraycopy(yv12bytes, ySize + cSize, i420bytes, ySize, cSize);     // U <- after V
        System.arraycopy(yv12bytes, ySize, i420bytes, ySize + cSize, cSize);     // V <- before U
        return i420bytes;
    }

    /**
     * Converts NV21 (Y + interleaved VU) to NV12 (Y + interleaved UV).
     *
     * Fix: the original "j - 1" variant wrote nv12[framesize - 1] (clobbering
     * the last Y byte) and read nv21[framesize - 1] (a Y byte) on the first
     * chroma pair, and also re-copied the Y plane byte-by-byte after already
     * doing so with System.arraycopy.
     */
    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
        if (nv21 == null || nv12 == null) return;
        int framesize = width * height;
        System.arraycopy(nv21, 0, nv12, 0, framesize);
        for (int j = 0; j < framesize / 2; j += 2) {
            nv12[framesize + j] = nv21[framesize + j + 1];     // U
            nv12[framesize + j + 1] = nv21[framesize + j];     // V
        }
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private long computePresentationTime(long frameIndex) {
        // Wall-clock based; frameIndex is intentionally ignored.
        return System.nanoTime() / 1000;
    }

    /** Sink for encoded H.264 data produced by {@link #encodeFrame}. */
    public interface EcodeH264Callback {

        void getH264Data(byte[] data, int start, int length, boolean isSharing, boolean isKey);
    }

    /** Asks the codec to emit a sync (I) frame as soon as possible. */
    public void applyIFrame() {
        try {
            Bundle param = new Bundle();
            param.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
            mediaCodec.setParameters(param);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Changes the target bit rate (bits/second) of a running encoder. */
    public void changeBitRate(int bitrate) {
        Bundle param = new Bundle();
        param.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate);
        mediaCodec.setParameters(param);
    }

    SpsAndPpsDataCallback spsAndPpsDataCa;

    /** Registers the sink that receives SPS/PPS whenever the codec emits config data. */
    public void setSpsAndPpsDataCa(SpsAndPpsDataCallback spsAndPpsDataCa) {
        this.spsAndPpsDataCa = spsAndPpsDataCa;
    }

    public interface SpsAndPpsDataCallback {
        void onDataCallback(byte[] sps, byte[] pps);
    }
}
