package com.chenjim.glrecorder;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.EGLContext;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.SystemClock;
import android.util.Log;
import android.view.Surface;
import android.widget.Toast;

import androidx.annotation.NonNull;

import com.chenjim.glrecorder.audio.AudioRecordPcm;
import com.chenjim.glrecorder.audio.PcmEncodeAacCtrl;
import com.chenjim.glrecorder.util.CommonUtils;
import com.chenjim.glrecorder.util.Logger;
import com.chenjim.glrecorder.util.UdpSend;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

/**
 * 录制类
 */
public class MediaRecorder {

    private static final String TAG = "MediaRecorder";
    // Application context (never an Activity), used for Toasts and EGL setup.
    private final Context mContext;
    // Output path supplied by the caller; currently only shown in stop()'s
    // Toast — initMuxer() builds its own file name. TODO confirm intent.
    private final String mPath;
    // Encoded video dimensions in pixels.
    private final int mWidth;
    private final int mHeight;
    // Shared EGL context so the encoder thread can sample the preview texture.
    private final EGLContext mEglContext;
    // H.264 surface-input encoder.
    private MediaCodec mMediaCodec;
    // Encoder input surface; EGLBase renders frames onto it.
    private Surface mInputSurface;
    // MP4 writer for the current segment; null between segments / after stop().
    private MediaMuxer mMuxer;
    // True once the muxer was started by the (disabled) audio path.
    private volatile boolean isMediaMuxerStart = false;
    // Recording start time in microseconds (elapsedRealtime-based).
    private volatile long startTimeUs = 0;
    // Accumulated recorded time before a pause, in microseconds.
    private volatile long pauseRecordTimeUs = 0;
    // Posts work onto the dedicated "VideoCodec" thread; null after stop().
    private Handler mHandler;
    // EGL environment owned by the codec thread.
    private EGLBase mEglBase;
    // True while the encoder is running (set in initMediaCodec, cleared in stop).
    private volatile boolean isStart;
    // Muxer track index for video; -1 when no track is registered.
    private int mVideoTrackIndex = -1;
    //private int audioTrack = -1;
    // Playback-speed factor passed to start(); used by the (disabled) timestamp path.
    private float mSpeed;
    // Audio capture/encode members — currently never instantiated (see start()).
    private AudioRecordPcm audioRecord;
    private PcmEncodeAacCtrl pcmEncode;
    // True until the first key frame has been handled.
    private boolean isNeedKeyFrame = true;
    private boolean isRecordPause = false;
    // finish write first key frame
    private volatile boolean isWriteFirstKeyFrame = false;

    /**
     * Creates the encoder, muxer and a dedicated EGL/codec thread.
     *
     * @param context    any context; only the application context is retained
     * @param path       intended save path for the video (currently only shown
     *                   in stop()'s Toast — see initMuxer())
     * @param width      video width in pixels
     * @param height     video height in pixels
     *                   (fps, bitrate etc. could also be exposed as parameters)
     * @param eglContext the preview's EGL context, shared so the encoder
     *                   thread can sample the preview texture
     */
    public MediaRecorder(Context context, String path, int width, int height, EGLContext eglContext) {
        mContext = context.getApplicationContext();
        mPath = path;
        mWidth = width;
        mHeight = height;
        mEglContext = eglContext;


        // Must run first: it creates mInputSurface, which the EGL environment
        // below wraps as its render target.
        initMediaCodec();

        /*
         * Set up the EGL environment.
         */
        // Handler: lets other threads hand work to the codec thread.
        HandlerThread handlerThread = new HandlerThread("VideoCodec");
        handlerThread.start();
        Looper looper = handlerThread.getLooper();
        // Used by other threads to post onto this worker thread.
        mHandler = new Handler(looper);
        // The worker thread owns our EGL context: all OpenGL operations
        // against this EGL environment must run on it.
        mHandler.post(() -> {
            // Create the EGL environment (display, context, window surface, ...).
            mEglBase = new EGLBase(mContext, mWidth, mHeight, mInputSurface, mEglContext);
        });
    }

    /**
     * Starts recording with the given playback-speed factor.
     *
     * @param speed speed factor applied to presentation timestamps
     *              (consumed by the timestamp-adjustment path)
     */
    public void start(float speed) {
        mSpeed = speed;
    }

    /**
     * Configures and starts the H.264 encoder and creates its input surface.
     * Called from the constructor before the EGL environment is built, since
     * EGLBase renders into {@code mInputSurface}.
     *
     * @throws RuntimeException if the encoder cannot be created
     */
    private void initMediaCodec() {
        Log.d("mWidth==>", String.valueOf(mWidth));
        Log.d("mHeight==>", String.valueOf(mHeight));
        /*
         * Configure the MediaCodec encoder.
         */
        try {
            // Video format: AVC (H.264), at the requested width/height.
            MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, mWidth, mHeight);
            // Color format: frames are supplied via a Surface, not buffers.
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            // Bitrate: 1000 kbps.
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 1000_000);
            // Frame rate (hint for rate control).
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 25);
            // Key-frame interval, in seconds.
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
            // Create the encoder.
            mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            // Hand the parameters to the encoder.
            mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            // Input surface acting as a virtual display: OpenGL draws the
            // preview texture onto it and MediaCodec encodes those frames
            // automatically. Must be created between configure() and start().
            mInputSurface = mMediaCodec.createInputSurface();
            mMediaCodec.start();
            isStart = true;

            initMuxer();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Creates a new MediaMuxer writing an MP4 named after the current time
     * into external storage.
     * NOTE(review): the {@code mPath} constructor argument is not used here —
     * confirm whether output should instead go to mPath.
     *
     * @throws RuntimeException if the muxer cannot be created
     */
    private void initMuxer() {
        // Removed unused local: a hard-coded SD-card path ("/storage/C402-4DB8")
        // that was never read.
        File dir = Environment.getExternalStorageDirectory();
        String outputPath = new File(dir,
                CommonUtils.formatTime(System.currentTimeMillis()) + ".mp4").toString();
        Log.d(TAG, "output file is " + outputPath);
        try {
            mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException ioe) {
            throw new RuntimeException("MediaMuxer creation failed", ioe);
        }
    }

    // Wall-clock time (ms) when the current muxer segment started; getCodec()
    // uses it to decide when to split the recording into a new file.
    private long startTime = 0;
    // True once mMuxer.start() has been called for the current segment.
    private boolean mMuxerStarted = false;

    /**
     * Registers the encoder's negotiated output format with the muxer and
     * starts it. Must be called exactly once per muxer segment, after
     * MediaCodec reports INFO_OUTPUT_FORMAT_CHANGED.
     *
     * @throws IllegalStateException if the muxer was already started
     */
    private void addMuxerFormat() {
        if (mMuxerStarted) {
            // Raise the idiomatic exception for a state violation (still a
            // RuntimeException, so existing callers are unaffected).
            throw new IllegalStateException("format changed twice");
        }
        MediaFormat newFormat = mMediaCodec.getOutputFormat();
        // now that we have the Magic Goodies, start the muxer
        mVideoTrackIndex = mMuxer.addTrack(newFormat);
        mMuxer.start();
        mMuxerStarted = true;
        Log.d(TAG, "encoder output format changed: " + newFormat + "===:" + mVideoTrackIndex);

        // Remember the segment start so getCodec() can split long recordings.
        startTime = System.currentTimeMillis();
    }

    /**
     * Finalizes the current MP4 segment on disk and immediately starts a new
     * one, so long recordings are split into multiple files.
     */
    public void splitVideo() {
        stopMuxer();
        restartMuxer();
    }

    /**
     * Stops and releases the current muxer, finalizing the MP4 segment on
     * disk. Safe to call when no muxer is active. The muxer is always
     * released and nulled out, even if stop() throws — previously an
     * IllegalStateException from stop() (e.g. no sample ever written) skipped
     * release() and leaked the muxer.
     */
    public void stopMuxer() {
        if (mMuxer == null) {
            return;
        }
        mMuxerStarted = false;
        try {
            mMuxer.stop();
        } catch (IllegalStateException e) {
            // The segment may be unusable, but we must still release the
            // native muxer instance and reset our state.
            Log.e(TAG, "MediaMuxer.stop() failed", e);
        } finally {
            mMuxer.release();
            mMuxer = null;
            mVideoTrackIndex = -1;
        }
    }


    // Starts a new recording segment: creates a fresh muxer and re-registers
    // the encoder's current output format so encoding continues seamlessly.
    private void restartMuxer() {
        initMuxer();
        addMuxerFormat();
    }


    /**
     * Queues one frame for encoding; called once per new image/texture.
     * The draw and encoder drain run on the codec handler thread.
     *
     * @param textureId OpenGL texture holding the frame
     * @param timestamp frame timestamp forwarded to EGLBase.draw()
     *                  (units per EGLBase's contract — TODO confirm)
     */
    public void encodeFrame(final int textureId, final long timestamp) {
        // stop() nulls mHandler/mEglBase from another thread; snapshot them so
        // a late frame is dropped instead of crashing with an NPE.
        final Handler handler = mHandler;
        if (!isStart || handler == null) {
            return;
        }
        handler.post(() -> {
            EGLBase egl = mEglBase;
            if (egl == null) {
                return;
            }
            // Render the texture onto the encoder's input surface...
            egl.draw(textureId, timestamp);
            // ...then drain any encoded output into the muxer.
            getCodec(false);
        });
    }


    byte[] sps_pps = null;

    /**
     * Drains all pending output from the video encoder and writes it to the
     * muxer. Runs on the codec handler thread.
     *
     * @param endOfStream true to signal end-of-input and keep draining until
     *                    the encoder emits BUFFER_FLAG_END_OF_STREAM
     */
    private void getCodec(boolean endOfStream) {
        if (endOfStream) {
            // Tell the encoder no more frames will arrive on the input surface.
            mMediaCodec.signalEndOfInputStream();
        }
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        // Pull everything already encoded and write it out to the MP4.
        while (true) {
            int status = mMediaCodec.dequeueOutputBuffer(bufferInfo, 10_000);
            if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // No output yet: either more input is needed or encoding isn't
                // finished. Give up unless we are draining for end-of-stream.
                if (!endOfStream) {
                    break;
                }
            } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Reported once when encoding starts: register the track and
                // start the muxer.
                addMuxerFormat();

                // Capture SPS/PPS (csd-0/csd-1) from the negotiated format.
                MediaFormat newFormat = mMediaCodec.getOutputFormat();
                ByteBuffer spsBuffer = newFormat.getByteBuffer("csd-0");
                ByteBuffer ppsBuffer = newFormat.getByteBuffer("csd-1");
                if (spsBuffer != null && ppsBuffer != null) {
                    byte[] spsData = new byte[spsBuffer.remaining()];
                    spsBuffer.get(spsData);
                    Log.i("mMediaCodec===>", Arrays.toString(spsData));
                    byte[] ppsData = new byte[ppsBuffer.remaining()];
                    ppsBuffer.get(ppsData);
                    Log.i("mMediaCodec===>", Arrays.toString(ppsData));
                    // Keep SPS+PPS concatenated for potential raw-stream use.
                    sps_pps = new byte[spsData.length + ppsData.length];
                    System.arraycopy(spsData, 0, sps_pps, 0, spsData.length);
                    System.arraycopy(ppsData, 0, sps_pps, spsData.length, ppsData.length);
                }

            } else if (status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // Ignored (deprecated notification on API 21+).
            } else if (status >= 0) {
                // BUG FIX: was "status > 0", which skipped — and never
                // released — the perfectly valid output-buffer index 0.
                ByteBuffer outputBuffer = mMediaCodec.getOutputBuffer(status);
                // Codec-config data (SPS/PPS) is carried in the MediaFormat;
                // it must not be written to the MP4 as a sample.
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    bufferInfo.size = 0;
                }

                if (bufferInfo.size != 0 && outputBuffer != null
                        && mMuxerStarted && mMuxer != null) {
                    if (System.currentTimeMillis() - startTime > 10 * 60 * 1000) {
                        // Current segment is over 10 minutes old: roll over to
                        // a new file (this frame is intentionally dropped).
                        splitVideo();
                    } else {
                        // Constrain the ByteBuffer to the encoded payload and
                        // hand it to the muxer.
                        outputBuffer.position(bufferInfo.offset);
                        outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
                        mMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, bufferInfo);
                        Log.d(TAG, "sent " + bufferInfo.size + " bytes to muxer");
                    }
                }
                // Done with this output buffer — return it to the codec.
                mMediaCodec.releaseOutputBuffer(status, false);
                // Encoder signalled the end of the stream: stop draining.
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break;
                }
            }
        }
    }

    /**
     * Stops recording: drains the encoder, finalizes the MP4, and tears down
     * the codec, muxer, EGL environment and the codec thread. A second call
     * is a no-op (previously it crashed with an NPE on the nulled mHandler).
     */
    public void stop() {
        final Handler handler = mHandler;
        if (handler == null) {
            // Already stopped.
            return;
        }
        Toast.makeText(mContext, mPath, Toast.LENGTH_LONG).show();
        isStart = false;
        isMediaMuxerStart = false;
        isWriteFirstKeyFrame = false;
        handler.post(() -> {
            // Signal end-of-stream and drain the remaining encoded frames.
            getCodec(true);
            try {
                stopMuxer();
            } catch (IllegalStateException e) {
                e.printStackTrace();
            }

            mVideoTrackIndex = -1;
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;

            mMuxer = null;
            mEglBase.release();
            mEglBase = null;
            mInputSurface = null;
            pauseRecordTimeUs = 0;
            // Quit the codec thread once all queued work has run.
            handler.getLooper().quitSafely();
            mHandler = null;
        });
    }


    /**
     * Audio Record Pcm Callback: receives raw PCM capture events and feeds
     * them into the PCM-to-AAC encoder.
     * NOTE(review): the audio path is currently disabled — audioRecord and
     * pcmEncode are never instantiated (see commented-out code in start()),
     * so pcmEncode would be null here; confirm before re-enabling audio.
     */
    class AudioRecordPcmCallback implements AudioRecordPcm.Callback {

        @Override
        public void start(int sampleRate, int sampleRateType, int channelCount, int maxBufSize) {
            // Mirror the capture parameters into the AAC encoder.
            pcmEncode.start(sampleRate, channelCount, maxBufSize, null,
                    new PcmEncodeAacCtrlCallback());
        }

        @Override
        public void offerPcm(@NonNull byte[] data) {
            // Forward each raw PCM chunk for AAC encoding.
            pcmEncode.offerData(data);
        }

        @Override
        public void finished() {
            // Nothing to clean up when capture ends.
        }
    }

    /**
     * Pcm Encode to Aac then Callback: receives encoded AAC frames.
     * NOTE(review): part of the disabled audio path. initSuccess() also calls
     * mMuxer.start(), which would conflict with addMuxerFormat() starting the
     * same muxer — confirm the intended sequencing before re-enabling audio.
     */
    class PcmEncodeAacCtrlCallback implements PcmEncodeAacCtrl.Callback {

        @Override
        public void initSuccess(@NonNull MediaFormat format) {
            //audioTrack = mMediaMuxer.addTrack(format);
            // Start the muxer once the video track exists and it is not
            // already running.
            if (mVideoTrackIndex >= 0 && !isMediaMuxerStart) {
                mMuxer.start();
//                startTimeUs = SystemClock.elapsedRealtimeNanos() / 1000;
                isMediaMuxerStart = true;
            }
        }

        @Override
        public void offerAac(@NonNull ByteBuffer data, @NonNull MediaCodec.BufferInfo info) {
            // Only timestamp (and eventually write) audio while actively
            // recording and after the first video key frame has been seen.
            if (isMediaMuxerStart && !isRecordPause && !isNeedKeyFrame) {
                info.presentationTimeUs = pauseRecordTimeUs + (SystemClock.elapsedRealtimeNanos() / 1000 - startTimeUs);
                Logger.d(info.presentationTimeUs);
                //mMediaMuxer.writeSampleData(audioTrack, data, info);
                //Logger.d("aac presentationTimeUs:" + info.presentationTimeUs);
            }
        }

        @Override
        public void finished() {
            // Nothing to release here.
        }
    }

    /**
     * Asks the EGL layer to toggle its save flag.
     * NOTE(review): must not be called after stop(), which nulls mEglBase.
     */
    public void saveFile() {
        final EGLBase egl = mEglBase;
        egl.changeFlag();
    }
}
