package com.zhg.videorecord.encoder;

import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Encodes raw NV21 camera preview frames (as delivered by {@code onPreviewFrame})
 * into a raw H.264/AVC elementary stream written to a file.
 *
 * <p>Usage: construct, feed frames via {@link #offerEncoder(byte[])} or
 * {@link #offerEncoder2(byte[])}, then call {@link #close()}.
 * Not thread-safe; call all methods from a single thread.
 */
public class MediaCodecBuffEncoder2 {

    /** H.264 Advanced Video Coding MIME type. */
    public static final String MIME_TYPE = "video/avc";
    private static final String TAG = "MediaCodecBuffEncoder";
    /** Timeout in microseconds when polling the encoder for output. */
    private static final long DEQUEUE_TIMEOUT_US = 12000;

    private MediaCodec mediaCodec;
    private final int m_width;
    private final int m_height;
    private final int m_frameRate;
    /** SPS/PPS configuration data emitted by the encoder; prepended to key frames. */
    private byte[] configByte = null;
    /** Sink for the encoded H.264 stream. */
    private final BufferedOutputStream bos;
    /** Monotonically increasing frame counter used to derive presentation timestamps. */
    private long generateIndex = 0;
    /** Debug dump target for raw YUV frames (see {@link #saveRaw(byte[])}). */
    private String rawPath = "/mnt/internal_sd/DCIM/test.yuv";

    /**
     * Configures and starts an AVC encoder.
     *
     * @param width     frame width in pixels
     * @param height    frame height in pixels
     * @param frameRate expected input frame rate (fps); also drives timestamps
     * @param bitRate   target bit rate in bits per second
     * @param outPath   output file path for the raw H.264 stream (truncated if present)
     * @throws IOException if the output file cannot be opened or the codec cannot be created
     */
    @SuppressLint("NewApi")
    public MediaCodecBuffEncoder2(int width, int height, int frameRate, int bitRate, String outPath) throws IOException {
        m_width = width;
        m_height = height;
        // Guard against a nonsensical rate so timestamp math never divides by zero.
        m_frameRate = frameRate > 0 ? frameRate : 30;
        this.bos = new BufferedOutputStream(new FileOutputStream(new File(outPath), false));

        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
        // createVideoFormat already sets MIME, width and height — no need to set them again.
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        // Camera onPreviewFrame delivers NV21 by default; we convert each frame to NV12
        // before queueing it, so declare the semi-planar (NV12) input color format here.
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        // Key-frame interval in seconds (0 would make every frame a key frame).
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        // NOTE: the previous version also set KEY_SAMPLE_RATE / KEY_CHANNEL_COUNT here;
        // those are audio keys and have no place on a video format, so they were removed.
        Log.d(TAG, "format: " + mediaFormat);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    /**
     * Stops and releases the encoder, then flushes and closes the output file.
     * The file is closed even if stopping the codec throws.
     */
    @SuppressLint("NewApi")
    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            bos.flush();
            bos.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Debug helper: dumps one raw YUV frame to {@link #rawPath}.
     * Each call truncates the file, so only the most recent frame is kept.
     */
    private void saveRaw(byte[] data) {
        // try-with-resources guarantees the stream is flushed and closed
        // (the previous version leaked it).
        try (BufferedOutputStream out =
                     new BufferedOutputStream(new FileOutputStream(new File(rawPath), false))) {
            out.write(data, 0, data.length);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Encodes one NV21 frame and appends any available encoder output to the file.
     * Blocks until an input buffer is available; drains output with a short timeout.
     *
     * @param input one full NV21 frame of size {@code width * height * 3 / 2}
     */
    @SuppressLint("NewApi")
    public void offerEncoder2(byte[] input) {
        // Negative timeout blocks until an input buffer is free.
        int inputBuffIndex = mediaCodec.dequeueInputBuffer(-1);
        Log.i(TAG, "inputBuffIndex=" + inputBuffIndex);
        if (inputBuffIndex >= 0) {
            ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBuffIndex);
            if (inputBuffer != null) {
                inputBuffer.clear();
                // Convert NV21 -> NV12 to match the configured COLOR_FormatYUV420SemiPlanar.
                // (The previous version queued a zero-filled array, encoding black frames.)
                byte[] nv12 = new byte[input.length];
                convertNv21ToNv12(input, nv12, m_width, m_height);
                inputBuffer.put(nv12);
                mediaCodec.queueInputBuffer(inputBuffIndex, 0, nv12.length,
                        computePresentationTime(generateIndex++), 0);
            }
        }

        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBuffIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
        // Drain every buffer the encoder has ready, not just the first one.
        while (outputBuffIndex >= 0) {
            Log.i(TAG, "outputBuffIndex=" + outputBuffIndex);
            ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBuffIndex);
            // Use the BufferInfo this dequeue actually filled (the previous version
            // consulted a separate, never-populated field).
            if (outputBuffer != null && bufferInfo.size > 0) {
                outputBuffer.position(bufferInfo.offset);
                outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                try {
                    bos.write(outData, 0, outData.length);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            mediaCodec.releaseOutputBuffer(outputBuffIndex, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.i(TAG, "BUFFER_FLAG_END_OF_STREAM");
                break;
            }
            outputBuffIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
        }
        if (outputBuffIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Subsequent data conforms to the new format; safe to ignore since we
            // operate on raw output buffers only.
            Log.i(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
        }
    }

    /**
     * Encodes one NV21 frame, prepending the cached SPS/PPS config to every key
     * frame so the written stream is seekable/playable from any key frame.
     *
     * @param input one full NV21 frame of size {@code width * height * 3 / 2}
     */
    @SuppressLint("NewApi")
    public void offerEncoder(byte[] input) {
        try {
            // Negative timeout blocks until an input buffer is free.
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufferIndex);
                if (inputBuffer != null) {
                    inputBuffer.clear();
                    // Convert NV21 -> NV12 for COLOR_FormatYUV420SemiPlanar input.
                    // (The previous version queued a zero-filled array, encoding black frames.)
                    byte[] nv12 = new byte[input.length];
                    convertNv21ToNv12(input, nv12, m_width, m_height);
                    inputBuffer.put(nv12);
                    System.out.println("queued frame " + generateIndex);
                    // Queue only when we actually filled the buffer (previously the
                    // queue call ran even when getInputBuffer returned null).
                    mediaCodec.queueInputBuffer(inputBufferIndex, 0, nv12.length,
                            computePresentationTime(generateIndex++), 0);
                }
            }

            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
            while (outputBufferIndex >= 0) {
                System.out.println("outputBufferIndex=" + outputBufferIndex);
                ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex);
                byte[] outData = new byte[bufferInfo.size];
                if (outputBuffer != null) {
                    outputBuffer.get(outData);
                }
                // flags is a bit field — test with a mask, not equality.
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // Cache SPS/PPS; it is prepended to each key frame below.
                    configByte = outData;
                } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
                        && configByte != null) {
                    byte[] keyframe = new byte[outData.length + configByte.length];
                    System.arraycopy(configByte, 0, keyframe, 0, configByte.length);
                    System.arraycopy(outData, 0, keyframe, configByte.length, outData.length);
                    bos.write(keyframe, 0, keyframe.length);
                } else {
                    bos.write(outData, 0, outData.length);
                }
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, DEQUEUE_TIMEOUT_US);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Converts an NV21 frame (Y plane followed by interleaved V/U) into NV12
     * (Y plane followed by interleaved U/V), the layout expected by
     * {@code COLOR_FormatYUV420SemiPlanar}.
     *
     * @param nv21   source frame; must hold at least {@code width * height} luma bytes
     * @param nv12   destination; must be at least {@code nv21.length} bytes
     * @param width  frame width in pixels
     * @param height frame height in pixels
     */
    private static void convertNv21ToNv12(byte[] nv21, byte[] nv12, int width, int height) {
        int frameSize = width * height;
        int lumaBytes = Math.min(frameSize, nv21.length);
        // Luma plane is identical in both layouts.
        System.arraycopy(nv21, 0, nv12, 0, lumaBytes);
        // Chroma: swap each V/U pair into U/V order.
        for (int i = frameSize; i + 1 < nv21.length; i += 2) {
            nv12[i] = nv21[i + 1];     // U
            nv12[i + 1] = nv21[i];     // V
        }
    }

    /**
     * Generates the presentation time for frame N, in microseconds, using the
     * frame rate the encoder was configured with (the previous version
     * hard-coded 30 fps regardless of the constructor argument).
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1_000_000L / m_frameRate;
    }
}
