package com.renfei.multimediatask.ui.task8;

import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;

import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.RelativeLayout;

import com.renfei.multimediatask.R;
import com.renfei.multimediatask.base.BaseActivity;
import com.renfei.multimediatask.util.Camera1Loader;
import com.renfei.multimediatask.util.Camera2Loader;
import com.renfei.multimediatask.util.CameraLoader;
import com.renfei.multimediatask.util.YUVDataUtil;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.util.concurrent.ArrayBlockingQueue;

import butterknife.BindView;
import butterknife.OnClick;

public class VideoRecordEncodecH264Activity extends BaseActivity {

    // Preview surface the camera renders into (ButterKnife-bound views).
    @BindView(R.id.surfaceView)
    TextureView surfaceView;
    @BindView(R.id.startRecord)
    Button startRecord;
    @BindView(R.id.stopRecord)
    Button stopRecord;
    @BindView(R.id.ll_btn_layout)
    RelativeLayout llBtnLayout;
    // Camera abstraction: Camera1Loader pre-21, Camera2Loader otherwise.
    private CameraLoader cameraLoader;


    // Hand-off queue of NV21 preview frames from the camera callback to the encoder thread.
    private ArrayBlockingQueue<byte[]> queue;

    // Absolute path of the MP4 file the muxer writes.
    private String targetFilePath;

    private VideoEncodecThread videoEncodecThread;


    // Actual encode size — the preview size the camera selected (may differ from the request below).
    private int recordWidth;
    private int recordHeight;

    // Requested preview size; the camera picks the closest supported size.
    private int preViewWidth = 360;
    private int preViewHeight = 640;

    @Override
    protected int getLayoutId() {
        // Layout hosting the TextureView preview plus the start/stop buttons.
        return R.layout.activity_video_record_encodec_h264;
    }

    /**
     * Wires up the recording pipeline: frame queue, encoder thread, camera
     * loader, the preview-frame callback that feeds the queue, and the
     * TextureView lifecycle that starts the camera preview.
     */
    @Override
    protected void initListener() {
        queue = new ArrayBlockingQueue<>(1024);
        targetFilePath = getParentFilePath() + "video_encodec.mp4";

        videoEncodecThread = new VideoRecordEncodecH264Activity.VideoEncodecThread(new WeakReference<>(this));

        // Camera2 requires API 21; fall back to the legacy Camera API below that.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
            cameraLoader = new Camera1Loader(this);
        } else {
            cameraLoader = new Camera2Loader(this);
        }


        cameraLoader.setOnPreviewFrameListener(new CameraLoader.OnPreviewFrameListener() {
            @Override
            public void onPreviewFrame(byte[] data, int width, int height) {
                // Queue the NV21 frame for encoding, but only while the encoder runs.
                if (null != videoEncodecThread && videoEncodecThread.isEncodec) {
                    try {
                        queue.put(data);
                    } catch (InterruptedException e) {
                        // FIX: restore the interrupt status instead of silently
                        // swallowing it, so the interrupting party is not ignored.
                        Thread.currentThread().interrupt();
                        e.printStackTrace();
                    }
                }
            }
        });
        surfaceView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
            @Override
            public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
                loge("onSurfaceTextureAvailable", "width    =" + width);
                loge("onSurfaceTextureAvailable", "height    =" + height);
                cameraLoader.onCreate(preViewWidth, preViewHeight, new CameraLoader.OnPreviewSelectListener() {
                    @Override
                    public void onPreviewSizeSelect(int pwidth, int pheight) {
                        loge("onPreviewSizeSelect", "width    =" + pwidth);
                        loge("onPreviewSizeSelect", "height    =" + pheight);
                        // The camera chose the closest supported preview size;
                        // configure the encoder with that actual size.
                        recordWidth = pwidth;
                        recordHeight = pheight;
                        videoEncodecThread.initEncodec(recordWidth, recordHeight);
                    }
                });
                cameraLoader.startPreViewTextureView(surfaceTexture);

            }

            @Override
            public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

            }

            @Override
            public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
                return false;
            }

            @Override
            public void onSurfaceTextureUpdated(SurfaceTexture surface) {

            }
        });

    }

    /** Dispatches the two record buttons to their respective handlers. */
    @OnClick({R.id.startRecord, R.id.stopRecord})
    public void onViewClicked(View view) {
        final int id = view.getId();
        if (id == R.id.startRecord) {
            startRecording();
        } else if (id == R.id.stopRecord) {
            stopRecording();
        }
    }

    /**
     * Asks the encoder thread to emit an end-of-stream buffer; the thread then
     * finalizes the MP4 and tears itself down.
     */
    private void stopRecording() {
        if (videoEncodecThread != null) {
            videoEncodecThread.stopEncodec();
        }
    }

    /**
     * Starts the encoder thread after preparing a fresh output file.
     * No-op if a recording is already in progress.
     */
    private void startRecording() {

        // Already recording: ignore repeated taps.
        if (null != videoEncodecThread && videoEncodecThread.isEncodec) {
            return;
        }

        // FIX: a java.lang.Thread may only be started once. After a previous
        // recording finished, calling start() again would throw
        // IllegalThreadStateException; bail out instead of crashing.
        // (Supporting a second session would require recreating the thread and
        // re-running initEncodec with the preview size — TODO wire that up.)
        if (videoEncodecThread == null || videoEncodecThread.getState() != Thread.State.NEW) {
            loge("startRecording", "encoder thread is not startable");
            return;
        }

        try {
            // Start every session with a fresh output file.
            File file = new File(targetFilePath);
            if (file.exists() && !file.delete()) {
                // FIX: the original ignored delete()'s result.
                loge("startRecording", "failed to delete old output file");
            }
            file.createNewFile();
            videoEncodecThread.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    /** Releases the camera while the activity is in the background. */
    @Override
    protected void onPause() {
        super.onPause();
        if (cameraLoader != null) {
            cameraLoader.onPause();
        }
    }


    static class VideoEncodecThread extends Thread {
        private static final boolean VERBOSE = true;
        // Weak reference so the background thread does not leak the Activity.
        private WeakReference<VideoRecordEncodecH264Activity> weakReference;

        private static final String TAG = VideoRecordEncodecH264Activity.VideoEncodecThread.class.getSimpleName();

        // H.264 video encoder and its configured format.
        private MediaCodec videoEncodec;
        private MediaFormat videoFormat;


        // True while the encode loop is running; also gates frame enqueueing.
        private boolean isEncodec = false;


        // Encode dimensions (the camera's selected preview size).
        private int width;
        private int height;

        // Timeout (microseconds) for dequeueOutputBuffer.
        private int TIMEOUT_USEC = 12000;


        /**
         * End-of-stream flag: set when the caller requests the recording to stop.
         * NOTE(review): despite the name, this has nothing to do with OpenGL OES
         * textures — it marks "end of stream".
         */
        private boolean isOES = false;

        /**
         * MP4 container muxer for the encoded output.
         */
        private MediaMuxer mediaMuxer;
        // Whether mediaMuxer.start() has been called (required before writeSampleData).
        private boolean mMuxerStarted;
        private int videoTrackIndex = -1;
        private long pts;

        public VideoEncodecThread(WeakReference<VideoRecordEncodecH264Activity> weakReference) {
            this.weakReference = weakReference;

        }

        /**
         * Records the encode dimensions and builds the encoder/muxer pair.
         * Must run before the thread is started.
         */
        public void initEncodec(int width, int height) {
            this.width = width;
            this.height = height;
            Log.e(TAG, "init Encodec   width    =" + width + "  height  =" + height);
            initEncodec();
        }


        /**
         * Configures and starts the H.264 encoder and creates the MP4 muxer
         * targeting the activity's output file.
         */
        private void initEncodec() {
            // FIX: the Activity is held via a WeakReference and may already be
            // gone; the original dereferenced weakReference.get() unconditionally
            // (NPE risk). Bail out early before allocating any codec resources.
            VideoRecordEncodecH264Activity activity = weakReference.get();
            if (activity == null) {
                Log.e(TAG, "initEncodec: activity already released");
                return;
            }
            try {

                videoFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);

                // Flexible YUV420 lets the codec pick its plane layout; the input
                // path feeds NV12 — presumably accepted, but device-dependent
                // (TODO confirm on target hardware).
                videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
                videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);  // 25 fps
                videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
                videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
                videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // key frame every second

                videoEncodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
                videoEncodec.configure(videoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                videoEncodec.start();

                mediaMuxer = new MediaMuxer(activity.targetFilePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

                // Track is added (and the muxer started) once the encoder reports
                // INFO_OUTPUT_FORMAT_CHANGED in the run loop.
                videoTrackIndex = -1;
                mMuxerStarted = false;
            } catch (IOException e) {
                e.printStackTrace();
            }
        }


        /**
         * Encode loop: drains NV21 frames from the activity's queue, converts
         * them to NV12, feeds the H.264 encoder, and writes encoded samples to
         * the MP4 muxer. Terminates after an end-of-stream buffer has been
         * submitted (see {@link #stopEncodec()}).
         */
        @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
        @Override
        public void run() {
            super.run();

            isEncodec = true;
            byte[] inputData = null;

            int inputChunk = 0;
            long firstInputTimeNsec = -1;


            while (isEncodec) {
                // ---- input side: fetch one camera frame, if available ----
                // FIX: guard weakReference.get() against the Activity being gone
                // (the original dereferenced it unconditionally — NPE risk).
                VideoRecordEncodecH264Activity activity = weakReference.get();
                if (activity != null && activity.queue.size() > 0) {
                    inputData = activity.queue.poll();
                    // Repack camera NV21 into NV12 for the encoder.
                    byte[] yuv420sp = new byte[width * height * 3 / 2];
                    YUVDataUtil.NV21ToNV12(inputData, yuv420sp, width, height);
                    inputData = yuv420sp;
                }


                if (null != inputData) {
                    int inputBufferIndex = videoEncodec.dequeueInputBuffer(-1);
                    if (inputBufferIndex >= 0) {
                        if (firstInputTimeNsec == -1) {
                            firstInputTimeNsec = System.nanoTime();
                        }

                        ByteBuffer byteBuffer = videoEncodec.getInputBuffer(inputBufferIndex);

                        if (isOES) {
                            // End of stream -- send empty frame with EOS flag set.
                            if (inputChunk <= 0) {
                                videoEncodec.queueInputBuffer(inputBufferIndex, 0, 0, 0L,
                                        MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                                if (VERBOSE) Log.d(TAG, "sent input EOS");
                                inputChunk++;
                            }

                            if (inputChunk > 0) {
                                isEncodec = false;
                            }

                        } else {
                            byteBuffer.clear();
                            byteBuffer.put(inputData, 0, inputData.length);

                            videoEncodec.queueInputBuffer(inputBufferIndex, 0,
                                    inputData.length,
                                    getPTSUs(), 0
                            );
                            // FIX: clear the frame after submitting it. The
                            // original kept the reference, so whenever the queue
                            // was empty the SAME frame was re-encoded every pass.
                            inputData = null;
                        }

                    } else {
                        if (VERBOSE) Log.d(TAG, "input buffer not available");
                    }
                }
                // ---- output side: drain encoded samples into the muxer ----
                MediaCodec.BufferInfo videoBufferinfo = new MediaCodec.BufferInfo();
                int outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, TIMEOUT_USEC);

                if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    Log.e(TAG, "MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: ");
                    if (mMuxerStarted) {
                        throw new RuntimeException("format changed twice");
                    }

                    // The encoder's actual output format (with csd-0/csd-1) is
                    // only valid now; add the track and start the muxer.
                    if (null != mediaMuxer) {
                        videoTrackIndex = mediaMuxer.addTrack(videoEncodec.getOutputFormat());
                        Log.e(TAG, "videoTrackIndex: " + videoTrackIndex);
                        mediaMuxer.start();
                        mMuxerStarted = true;
                        Log.e(TAG, " mediaMuxer.start() ");
                    }

                } else {
                    // FIX: 0 is a valid output-buffer index. The original looped
                    // on "outputBufferIndex > 0", silently dropping buffer 0.
                    // Negative values (INFO_TRY_AGAIN_LATER etc.) still skip the loop.
                    while (outputBufferIndex >= 0) {
                        ByteBuffer outputBuffer = videoEncodec.getOutputBuffer(outputBufferIndex);
                        if (outputBuffer == null) {
                            throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex +
                                    " was null");
                        }

                        if ((videoBufferinfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            // The codec config data was pulled out and fed to the muxer when we got
                            // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                            if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                            videoBufferinfo.size = 0;
                        }

                        if (videoBufferinfo.size != 0) {
                            if (!mMuxerStarted) {
                                throw new RuntimeException("muxer hasn't started");
                            }

                            // adjust the ByteBuffer values to match BufferInfo (not needed?)
                            outputBuffer.position(videoBufferinfo.offset);
                            outputBuffer.limit(videoBufferinfo.offset + videoBufferinfo.size);

                            mediaMuxer.writeSampleData(videoTrackIndex, outputBuffer, videoBufferinfo);
                            if (VERBOSE) {
                                Log.d(TAG, "sent " + videoBufferinfo.size + " bytes to muxer, ts=" + videoBufferinfo.presentationTimeUs);
                            }
                        }

                        videoEncodec.releaseOutputBuffer(outputBufferIndex, false);

                        if ((videoBufferinfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (!isOES) {
                                Log.w(TAG, "reached end of stream unexpectedly");
                            } else {
                                if (VERBOSE) Log.d(TAG, "end of stream reached");
                            }
                            break;      // out of while
                        }
                        outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, TIMEOUT_USEC);
                    }
                }


            }

            isEncodec = false;

            videoEncodec.stop();
            videoEncodec.release();
            videoEncodec = null;


            // FIX: MediaMuxer.stop() throws IllegalStateException when start()
            // was never called (e.g. stopped before any output arrived).
            if (mMuxerStarted) {
                mediaMuxer.stop();
                Log.e(TAG, "mediaMuxer: stop ");
            }
            mediaMuxer.release();
            Log.e(TAG, "mediaMuxer: release ");
            mediaMuxer = null;

            Log.e(TAG, "videoRecordActivity:  录制完成");
        }

        /**
         * presentationTimeUs of the previously issued timestamp; used to keep
         * timestamps monotonically non-decreasing for the muxer.
         */
        private long prevOutputPTSUs = 0;

        /**
         * Returns the presentation timestamp (microseconds) for the next frame.
         * <p>
         * FIX: the original guard computed {@code (prev - result) + result},
         * which is algebraically just {@code prev}, and it never updated
         * {@code prevOutputPTSUs} — so the monotonicity check was a no-op.
         * Clamp to the previous value and record the result instead.
         *
         * @return a microsecond timestamp, never smaller than the previous one
         */
        protected long getPTSUs() {
            long result = System.nanoTime() / 1000L;
            if (result < prevOutputPTSUs) {
                // Never let the muxer see time go backwards.
                result = prevOutputPTSUs;
            }
            prevOutputPTSUs = result;
            return result;
        }


        // Requests a graceful stop: the run loop will queue an end-of-stream
        // buffer, drain remaining output, and finalize the MP4.
        // NOTE(review): isOES is an end-of-stream flag, unrelated to OpenGL OES.
        public void stopEncodec() {
            isOES = true;
        }

        /**
         * Releases the encoder and muxer. Safe to call at any point in the
         * lifecycle, including before encoding ever started.
         */
        public void release() {

            Log.e(TAG, "videoRecordActivity:  release");
            if (null != videoEncodec) {
                // FIX: stop() throws IllegalStateException if the codec is not
                // in the Executing state; don't let that abort the release.
                try {
                    videoEncodec.stop();
                } catch (IllegalStateException e) {
                    Log.e(TAG, "videoEncodec.stop() failed", e);
                }
                videoEncodec.release();
                videoEncodec = null;
            }
            if (null != mediaMuxer) {
                // FIX: MediaMuxer.stop() throws IllegalStateException when
                // start() was never called; only stop a started muxer.
                try {
                    if (mMuxerStarted) {
                        mediaMuxer.stop();
                    }
                } catch (IllegalStateException e) {
                    Log.e(TAG, "mediaMuxer.stop() failed", e);
                }
                mediaMuxer.release();
                mediaMuxer = null;
                mMuxerStarted = false;
            }
        }
    }
}
