package mediacodec;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Environment;
import android.support.annotation.RequiresApi;
import android.util.Log;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

/**
 * @author ike
 * @function: 视频编码器 (video encoder: H.264 + AAC into an MP4 via MediaMuxer)
 * @create time: 11:24 2017/12/22
 **/

public class VideoEncoder {
    private final Context context;
    private String Tag = "VideoEncoder";
    private MediaCodec mediaCodec;
    //视频h264编码
    private final static String MINE_TYPE = "video/avc";
    //音频aac编码
    private final static String mine_type_audio = "audio/mp4a-latm";
    //音视频混合器
    private MediaMuxer mediaMuxer;
    public String savePath;
    private int maxInputSize;
    private boolean isAudioRun;


    @SuppressLint("NewApi")
    public VideoEncoder(Context context) {
        this.context = context;
        mediaCodec = getVideoMediaCodec();
        audiaMediaCodec = getAudioCodec();
        savePath = getPath("video/", "ike_" + System.currentTimeMillis() + ".mp4");
        Log.e(Tag, "保存路径:" + savePath);
        try {
            mediaMuxer = new MediaMuxer(savePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    //音频相关信息
    private AudioRecord mAudioRecord;
    private MediaCodec audiaMediaCodec;

    /**
     * 设置音频编码参数
     *
     * @return
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public MediaCodec getAudioCodec() {
        MediaFormat format = MediaFormat.createAudioFormat(mine_type_audio, 44100, 1);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 64 * 1024);
        format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 44100);

        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
        format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);

        MediaCodec mediaCodec = null;
        try {
            mediaCodec = MediaCodec.createEncoderByType(mine_type_audio);
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        } catch (Exception e) {
            e.printStackTrace();
            if (mediaCodec != null) {
                mediaCodec.stop();
                mediaCodec.release();
                mediaCodec = null;
            }
        }
        //录音器的配置
        maxInputSize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
        mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT, maxInputSize);
        return mediaCodec;
    }

    /**
     * 设置视频编码器参数
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public MediaCodec getVideoMediaCodec() {
        int videoWidth = MediaCodecActivity.IMAGE_WIDTH;
        int videoHeight = MediaCodecActivity.IMAGE_HEIGHT;
        MediaFormat format = MediaFormat.createVideoFormat(MINE_TYPE, videoWidth, videoHeight);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 1300 * 1024);
        int fps = 15;
        //设置摄像头预览帧率

        format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2);
        format.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
        format.setInteger(MediaFormat.KEY_COMPLEXITY, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
        MediaCodec mediaCodec = null;

        try {
            mediaCodec = MediaCodec.createEncoderByType(MINE_TYPE);
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        } catch (Exception e) {
            e.printStackTrace();
            if (mediaCodec != null) {
                mediaCodec.stop();
                mediaCodec.release();
                mediaCodec = null;
            }
        }
        return mediaCodec;
    }

    /**
     * 视频编码
     */
    private byte[] sps;
    private byte[] pps;
    private byte[] data;
    public void fireVideo(byte[] data){
        this.data=data;
    }
    /**
     * NV21 is a 4:2:0 YCbCr, For 1 NV21 pixel: YYYYYYYY VUVU I420YUVSemiPlanar
     * is a 4:2:0 YUV, For a single I420 pixel: YYYYYYYY UVUV Apply NV21 to
     * I420YUVSemiPlanar(NV12) Refer to https://wiki.videolan.org/YUV/
     */
    private void NV21toI420SemiPlanar(byte[] nv21bytes, byte[] i420bytes,
                                      int width, int height) {
        System.arraycopy(nv21bytes, 0, i420bytes, 0, width * height);
        for (int i = width * height; i < nv21bytes.length; i += 2) {
            i420bytes[i] = nv21bytes[i + 1];
            i420bytes[i + 1] = nv21bytes[i];
        }
    }

    byte[] mFrameData;
    @SuppressLint("NewApi")
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public boolean fireVideo() {
        if (data==null||data.length==0){
            Log.e(Tag,"data==null");
            return false;
        }
        //获取输入输出缓冲队列
        ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
        ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
        //获取有效输入缓存数组的下标(从输入缓存队列中取出一个数据进行编码操作)(-1 表示无限 0 表示不等待 大于0 表示等待的时间)
        int index = mediaCodec.dequeueInputBuffer(-1);
        if (index >= 0) {
            //获取缓存
            ByteBuffer inputBuffer = inputBuffers[index];
            inputBuffer.clear();
            if (mFrameData==null){
                mFrameData =new byte[MediaCodecActivity.IMAGE_WIDTH
                        *MediaCodecActivity.IMAGE_HEIGHT * 3 / 2];
            }
            NV21toI420SemiPlanar(data, mFrameData, MediaCodecActivity.IMAGE_WIDTH,
                    MediaCodecActivity.IMAGE_HEIGHT);
            inputBuffer.put(mFrameData);
            //将数据加入编码队列
            mediaCodec.queueInputBuffer(index, 0, mFrameData.length, (System.nanoTime()-startTime)/1000, 0);
        }
        //获取好h264编码后的数据
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        //获取输出有效缓存下标
        int outputBufferIndex ;
        //Log.e(Tag, "获取输出有效缓存下标：" + outputBufferIndex);
        do {
            outputBufferIndex= mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            Log.e(Tag,"outputBufferIndex:"+outputBufferIndex);
            //ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
//            byte[] bufferData=new byte[bufferInfo.size];
//            outputBuffer.get(bufferData);
//            Log.e(Tag,"获取到的数据:"+bufferData[4]);
//            if ((bufferData[4] & 0x1f) == 7) { // sps pps MediaCodec会在编码第一帧之前输出sps+pps sps pps加在一起
//                // sps = new byte[outData.length - 4];
//                // System.arraycopy(outData, 4, sps, 0, outData.length - 4);
//                Log.e(Tag, "sps pps:" + Arrays.toString(bufferData));
//                for (int i = 0; i < bufferData.length; i++) {
//                    if (i + 4 < bufferData.length) { // 保证不越界
//                        if (bufferData[i] == 0x00 && bufferData[i + 1] == 0x00
//                                && bufferData[i + 2] == 0x00
//                                && bufferData[i + 3] == 0x01) {
//                            //在这里将sps pps分开
//                            // if ((outData[i + 4] & 0x1f) == 7) { // & 0x1f =7
//                            // sps
//                            //
//                            // } else
//                            //sps pps数据如下: 0x00 0x00 0x00 0x01 7 sps 0x00 0x00 0x00 0x01 8 pps
//                            if ((bufferData[i + 4] & 0x1f) == 8) {// & 0x1f =8 pps
//                                //去掉界定符[0,0,0,1]
//                                sps = new byte[i - 4];
//                                System.arraycopy(bufferData, 4, sps, 0, sps.length);
//                                pps = new byte[bufferData.length
//                                        - (4 + sps.length) - 4];
//                                System.arraycopy(bufferData, 4 + sps.length + 4,
//                                        pps, 0, pps.length);
//                                break;
//                            }
//                        }
//                    }
//                }
//                Log.e(Tag, "sps :" + Arrays.toString(sps));
//                Log.e(Tag, "pps :" + Arrays.toString(pps));
//            }
//            //释放编码后的数据
//            mediaCodec.releaseOutputBuffer(outputBufferIndex,false);
//            // 重新获得编码bytebuffer下标
//            outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
//            Log.e(Tag,"完成 输出:" + outputBufferIndex);


            /////////////////编码混合视频
            if (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex);
                if (//audioTrackNum >= 0 &&
                        videoTrackNum >= 0 && bufferInfo.size > 0 && bufferInfo.presentationTimeUs > 0) {
                    mediaMuxer.writeSampleData(videoTrackNum, outputBuffer, bufferInfo);
                } else {
                    Log.e(Tag, "没有可以混合的视频数据");
                }
                //释放资源
                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.e(Tag, "video end");
                    return true;
                }
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                //数据变换，开始视频混合
                videoTrackNum = mediaMuxer.addTrack(mediaCodec.getOutputFormat());
                Log.e(Tag, "videoTrackNum:" + videoTrackNum + ",audioTrackNum:" + audioTrackNum);
                if (videoTrackNum >= 0 
                      //  && audioTrackNum >= 0
                        ) {
                    mediaMuxer.start();
                }
            } else if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.e(Tag, "稍后再试混合视频数据");
            }
        } while (outputBufferIndex >= 0);

        return false;

    }
    private boolean isVideoRun;
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    public void startEncoder() {
        startTime = System.nanoTime();
        //开始音频编码
        audiaMediaCodec.start();
        //开始开启音频设备获取音频信息
        mAudioRecord.startRecording();
        //开始视频编码
        mediaCodec.start();
        isAudioRun=true;
        isVideoRun=true;
//        new Thread(new Runnable() {
//            @Override
//            public void run() {
//                while (isAudioRun) {
//                    if (fireAudio()){
//                        break;
//                    }
//                }
//            }
//        }).start();
        new Thread(new Runnable() {
            @Override
            public void run() {
                while (isVideoRun) {
                    if (fireVideo()){
                        Log.e(Tag,"录制完毕");
                        break;
                    }
                   
                }  
            }
        }).start();
    }

    private long startTime;//开始时间
    private int audioTrackNum = -1;//音频通道个数
    private int videoTrackNum = -1;//视频通道个数

    /**
     * aac编码音频数据
     */
    @SuppressLint("NewApi")
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    private boolean fireAudio() {
        ByteBuffer[] inputBuffers = audiaMediaCodec.getInputBuffers();
        ByteBuffer[] outputBuffers = audiaMediaCodec.getOutputBuffers();
        int audioIndex = audiaMediaCodec.dequeueInputBuffer(-1);
        if (audioIndex >= 0) {
            ByteBuffer buffer = inputBuffers[audioIndex];
            buffer.clear();
            //填充数据
            int read = mAudioRecord.read(buffer, maxInputSize);
            if (read >= 0) {
                //加入编码队列
                audiaMediaCodec.queueInputBuffer(audioIndex, 0, maxInputSize,
                        (System.nanoTime() - startTime) / 1000, 0);
            }
        }
        //取出编码后的数据
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex ;
        do {
            outputBufferIndex = audiaMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            
            if (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                outputBuffer.position(bufferInfo.offset);
                if (audioTrackNum > 0 && videoTrackNum > 0 && bufferInfo.size > 0 && bufferInfo.presentationTimeUs > 0) {
                    mediaMuxer.writeSampleData(audioTrackNum, outputBuffer, bufferInfo);
                } else {
                    Log.e(Tag, "没有可以混合的银屏数据");
                }

                //释放资源
                audiaMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
               // outputBufferIndex = audiaMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.e(Tag, "audio end");
                    return true;
                }
            } else if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.e(Tag, "混合音频数据稍后再试");
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (audiaMediaCodec.getOutputFormat() == null) {
                    Log.e(Tag, "mediaCodec.getOutputFormat()==null");
                }
                if (mediaMuxer == null) {
                    Log.e(Tag, "mediaMuxer==null");
                }
                audioTrackNum = mediaMuxer.addTrack(audiaMediaCodec.getOutputFormat());
                if (audioTrackNum >= 0 && videoTrackNum >= 0) {
                    Log.e(Tag, "开始混合音频数据");
                    mediaMuxer.start();
                }
            }
        } while (outputBufferIndex >= 0);

        return false;
    }

    //获取VideoPath
    public String getPath(String path, String fileName) {
        String p = getBaseFolder() + path;
        File f = new File(p);
        if (!f.exists() && !f.mkdirs()) {
            return getBaseFolder() + fileName;
        }
        return p + fileName;
    }

    private String getBaseFolder() {
        String baseFolder = Environment.getExternalStorageDirectory() + "/Codec/";
        File f = new File(baseFolder);
        if (!f.exists()) {
            boolean b = f.mkdirs();
            if (!b) {
                baseFolder = context.getExternalFilesDir(null).getAbsolutePath() + "/";
            }
        }
        return baseFolder;
    }

    @SuppressLint("NewApi")
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public void stopEncoder() {

        try {
            isAudioRun=false;
            isVideoRun=false;
            //Audio Stop
            mAudioRecord.stop();
            audiaMediaCodec.stop();
            audiaMediaCodec.release();

            //Video Stop
            mediaCodec.stop();
            mediaCodec.release();

            //Muxer Stop
            videoTrackNum = -1;
            audioTrackNum = -1;
            mediaMuxer.stop();
            mediaMuxer.release();
        } catch (IllegalStateException e) {
            e.printStackTrace();
        }

    }
}
