package com.aispeech.aicube.robot;

import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Environment;
import android.util.Log;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.LinkedList;

/**
 * Singleton recorder that encodes raw PCM audio (AAC-LC) and raw NV21 camera
 * frames (H.264) with {@link MediaCodec} and muxes both streams into an MP4
 * file under {@code /sdcard/aispeech/} with {@link MediaMuxer}.
 *
 * <p>Usage: {@code init()} → {@code initAudioCodec()} + {@code initMediaCodec(w, h)}
 * (the drain thread starts once both encoders are up) → feed frames via
 * {@code recordAudio}/{@code recordVideo} → {@code destroy()} to finalize the file.
 *
 * <p>The muxer can only start after BOTH encoders have reported
 * INFO_OUTPUT_FORMAT_CHANGED and their tracks are registered; a fake video
 * frame is queued in {@code init()} to kick the video encoder.
 */
public class MyMemory {

    private static final String TAG = "MyMemory";

    // Audio encoder parameters: AAC-LC, 16 kHz, mono, 64 kbit/s.
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_AUDIO_AAC;
    private static final int SAMPLE_RATE = 16000;
    private static final int CHANNEL_COUNT = 1;
    private static final int SAMPLES_PER_FRAME = 3600;
    private static final int KEY_BIT_RATE = 64000;

    // Video encoder parameters: H.264, 3 Mbit/s, 30 fps, I-frame every second.
    private static final int VIDEO_BIT_RATE = 3000000;
    private static final int VIDEO_FRAME_RATE = 30;
    private static final int VIDEO_I_FRAME_INTERVAL = 1;

    private static final String MP4_PATH =
            Environment.getExternalStorageDirectory().getAbsolutePath() + "/aispeech/";

    // volatile is REQUIRED for double-checked locking in getInstance() to be
    // safe under the Java memory model (the original field was non-volatile).
    private static volatile MyMemory instance;

    private MediaMuxer mediaMuxer;

    private volatile boolean isMuxerStart = false;
    private volatile boolean isProcessing = false;

    private volatile boolean isAudioCodecInit = false;
    private volatile boolean isVideoCodecInit = false;

    // Bit 0: audio track registered with the muxer; bit 1: video track registered.
    private int trackFlag = 0;

    // Pending raw frames. Used strictly as FIFO queues (head = oldest frame);
    // the original used push()/pop(), i.e. LIFO, which encoded frames in
    // reverse order. All access is guarded by the instance lock.
    public static LinkedList<byte[]> mYUVQueue, mPcmQueue;

    // Scratch buffer for the NV21 -> NV12 conversion, sized w * h * 3 / 2.
    private byte[] yuv420spsrc;
    private int mWidth, mHeight;

    private MediaCodec mVideoCodec, mAudioCodec;
    private int mVideoTrackIndex, mAudioTrackIndex;
    // Base timestamp (nanoTime) used to derive presentation times in microseconds.
    private long mStartTime;

    private ProcessThread mProcessThread;

    private MyMemory() {
    }

    /** Returns the lazily-created singleton (double-checked locking). */
    public static MyMemory getInstance() {
        if (instance == null) {
            synchronized (MyMemory.class) {
                if (instance == null) {
                    instance = new MyMemory();
                }
            }
        }
        return instance;
    }

    /**
     * Converts an NV21 frame (Y plane + interleaved V/U) to NV12
     * (Y plane + interleaved U/V), i.e. swaps each chroma byte pair.
     *
     * <p>Fixes vs. the original: the redundant per-byte Y copy (duplicate of
     * {@code System.arraycopy}) is removed, and the off-by-one chroma indices
     * are corrected — the old loops wrote into the last byte of the Y plane
     * ({@code nv12[frameSize - 1]}) and read from {@code nv21[frameSize - 1]}.
     */
    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
        if (nv21 == null || nv12 == null) {
            return;
        }
        int frameSize = width * height;
        // Luma plane is identical in both layouts.
        System.arraycopy(nv21, 0, nv12, 0, frameSize);
        // Chroma plane: NV21 stores V,U pairs; NV12 stores U,V pairs.
        for (int j = 0; j < frameSize / 2; j += 2) {
            nv12[frameSize + j] = nv21[frameSize + j + 1];     // U
            nv12[frameSize + j + 1] = nv21[frameSize + j];     // V
        }
    }

    /**
     * Queues one PCM buffer for encoding.
     *
     * <p>Only the first {@code bufferSize} bytes are copied (the original
     * ignored {@code bufferSize} and aliased the caller's buffer, which
     * AudioRecord callers typically reuse). Gating is on {@code isProcessing}
     * rather than {@code isAudioReady()}: the audio track only registers after
     * the encoder produces output, which requires input first — the old check
     * deadlocked the audio pipeline.
     */
    public synchronized void recordAudio(byte[] buffer, int bufferSize) {
        if (!isProcessing || null == mPcmQueue || null == buffer || bufferSize <= 0) {
            Log.w(TAG, "recordAudio: dropped, trackFlag=" + trackFlag);
            return;
        }
        mPcmQueue.addLast(Arrays.copyOf(buffer, Math.min(bufferSize, buffer.length)));
    }

    /**
     * Queues one raw NV21 frame for encoding. {@code w}/{@code h} are kept for
     * signature compatibility; the encoder uses the dimensions passed to
     * {@link #initMediaCodec(int, int)}.
     */
    public synchronized void recordVideo(byte[] data, int w, int h) {
        // Null check BEFORE dereferencing (the original logged data.length first).
        if (null == data) {
            Log.w(TAG, "recordVideo: null frame");
            return;
        }
        Log.d(TAG, "recordVideo: " + data.length);
        if (!isProcessing || null == mYUVQueue) {
            Log.w(TAG, "recordVideo: dropped, trackFlag=" + trackFlag);
            return;
        }
        mYUVQueue.addLast(data);
    }

    /**
     * Creates, configures and starts the AAC audio encoder, then starts the
     * drain thread if the video encoder is also ready.
     */
    public void initAudioCodec() {
        Log.d(TAG, "initAudioCodec: ");
        try {
            mAudioCodec = MediaCodec.createEncoderByType(MIME_TYPE);
            MediaFormat mediaFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, CHANNEL_COUNT);
            mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNEL_COUNT);
            mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
            mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, SAMPLES_PER_FRAME);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, KEY_BIT_RATE);
            mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
            mAudioCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mAudioCodec.start();

            isAudioCodecInit = true;
            tryStartProcess();
        } catch (Exception e) {
            // configure()/start() throw unchecked exceptions besides the
            // IOException from createEncoderByType — catch them all, matching
            // initMediaCodec's handling.
            Log.e(TAG, "initAudioCodec failed", e);
        }
    }

    /**
     * Creates, configures and starts the H.264 video encoder for {@code w}x{@code h}
     * frames, then starts the drain thread if the audio encoder is also ready.
     */
    public void initMediaCodec(int w, int h) {
        Log.d(TAG, "initMediaCodec() called with: w = [" + w + "], h = [" + h + "]");
        try {
            mWidth = w;
            mHeight = h;
            // NV12 frame size: full-resolution Y plane + half-resolution UV plane.
            yuv420spsrc = new byte[w * h * 3 / 2];

            MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, w, h);
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_BIT_RATE);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, VIDEO_FRAME_RATE);
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VIDEO_I_FRAME_INTERVAL);

            mVideoCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            mVideoCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mVideoCodec.start();

            isVideoCodecInit = true;
            tryStartProcess();
        } catch (Exception e) {
            Log.e(TAG, "initMediaCodec failed", e);
        }
    }

    /**
     * Starts the drain thread once both encoders are initialized. Guards
     * against a null thread (init() not yet called — the original NPE'd here)
     * and against starting an already-started thread.
     */
    private synchronized void tryStartProcess() {
        Log.d(TAG, "tryStartProcess: ");
        if (isVideoCodecInit && isAudioCodecInit
                && mProcessThread != null && !mProcessThread.isAlive()) {
            mProcessThread.start();
        }
    }

    /**
     * Drain loop: feeds queued PCM/YUV frames into the encoders and forwards
     * their output to the muxer until {@code isProcessing} is cleared, then
     * releases both encoders.
     */
    private class ProcessThread extends Thread {
        @Override
        public void run() {
            Log.d(TAG, "mProcessThread run: +");
            mStartTime = System.nanoTime();
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

            while (isProcessing) {
                feedAudio();
                drainAudio(bufferInfo);
                feedVideo();
                drainVideo(bufferInfo);
            }

            releaseCodecs();
            Log.d(TAG, "mProcessThread run: -");
        }
    }

    /**
     * Feeds one queued PCM buffer into the audio encoder. The frame is taken
     * BEFORE dequeuing an input slot: the original dequeued first and, when no
     * data was queued, never returned the slot, eventually starving the encoder.
     */
    private void feedAudio() {
        byte[] pcm = getAudioBuf();
        if (pcm == null) {
            return;
        }
        int inputBufferId = mAudioCodec.dequeueInputBuffer(0);
        if (inputBufferId < 0) {
            requeueAudio(pcm); // no free slot — keep the frame for the next pass
            return;
        }
        ByteBuffer inputBuffer = mAudioCodec.getInputBuffer(inputBufferId);
        inputBuffer.clear();
        inputBuffer.put(pcm, 0, pcm.length);
        // Presentation time in microseconds relative to recording start.
        mAudioCodec.queueInputBuffer(inputBufferId, 0, pcm.length,
                (System.nanoTime() - mStartTime) / 1000, 0);
    }

    /** Forwards one encoded AAC packet to the muxer, or registers the audio track. */
    private void drainAudio(MediaCodec.BufferInfo bufferInfo) {
        int outputBufferId = mAudioCodec.dequeueOutputBuffer(bufferInfo, 0);
        if (outputBufferId >= 0) {
            ByteBuffer outputBuffer = mAudioCodec.getOutputBuffer(outputBufferId);
            // Codec-config (CSD) packets must NOT be written to MediaMuxer —
            // it already received them via the track's MediaFormat.
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0
                    && bufferInfo.size > 0 && outputBuffer != null) {
                outputBuffer.position(bufferInfo.offset);
                outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
                writeSampleData(mAudioTrackIndex, outputBuffer, bufferInfo);
            }
            mAudioCodec.releaseOutputBuffer(outputBufferId, false);
        } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            Log.w(TAG, "mAudioCodec: INFO_OUTPUT_FORMAT_CHANGED");
            // The original discarded addTrack's return value, leaving
            // mAudioTrackIndex at 0 — both streams then wrote to track 0.
            mAudioTrackIndex = addAudioTrack(mAudioCodec.getOutputFormat());
            startMuxer();
        }
    }

    /** Feeds one queued NV21 frame (converted to NV12) into the video encoder. */
    private void feedVideo() {
        byte[] frame = getVideoBuf();
        if (frame == null) {
            return;
        }
        int inputBufferId = mVideoCodec.dequeueInputBuffer(0);
        if (inputBufferId < 0) {
            Log.d(TAG, "run: mVideoCodec inputBufferId = " + inputBufferId);
            requeueVideo(frame); // no free slot — keep the frame for the next pass
            return;
        }
        NV21ToNV12(frame, yuv420spsrc, mWidth, mHeight);
        ByteBuffer inputBuffer = mVideoCodec.getInputBuffer(inputBufferId);
        inputBuffer.clear();
        inputBuffer.put(yuv420spsrc);
        mVideoCodec.queueInputBuffer(inputBufferId, 0, yuv420spsrc.length,
                (System.nanoTime() - mStartTime) / 1000, 0);
    }

    /** Forwards one encoded H.264 packet to the muxer, or registers the video track. */
    private void drainVideo(MediaCodec.BufferInfo bufferInfo) {
        int outputBufferId = mVideoCodec.dequeueOutputBuffer(bufferInfo, 0);
        if (outputBufferId >= 0) {
            ByteBuffer outputBuffer = mVideoCodec.getOutputBuffer(outputBufferId);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0
                    && bufferInfo.size > 0 && outputBuffer != null) {
                outputBuffer.position(bufferInfo.offset);
                outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
                writeSampleData(mVideoTrackIndex, outputBuffer, bufferInfo);
            }
            mVideoCodec.releaseOutputBuffer(outputBufferId, false);
        } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            Log.w(TAG, "mVideoCodec: INFO_OUTPUT_FORMAT_CHANGED");
            mVideoTrackIndex = addVideoTrack(mVideoCodec.getOutputFormat());
            startMuxer();
        } else {
            Log.d(TAG, "run: mVideoCodec inputBufferId = " + outputBufferId);
        }
    }

    /** Pops the oldest queued video frame, or null. Locks the same monitor as recordVideo. */
    private synchronized byte[] getVideoBuf() {
        return (null != mYUVQueue) ? mYUVQueue.pollFirst() : null;
    }

    /** Pops the oldest queued audio buffer, or null. Locks the same monitor as recordAudio. */
    private synchronized byte[] getAudioBuf() {
        return (null != mPcmQueue) ? mPcmQueue.pollFirst() : null;
    }

    /** Returns an un-consumed audio buffer to the head of the queue. */
    private synchronized void requeueAudio(byte[] pcm) {
        if (null != mPcmQueue) {
            mPcmQueue.addFirst(pcm);
        }
    }

    /** Returns an un-consumed video frame to the head of the queue. */
    private synchronized void requeueVideo(byte[] frame) {
        if (null != mYUVQueue) {
            mYUVQueue.addFirst(frame);
        }
    }

    /** Registers the audio track with the muxer and returns its track index. */
    private synchronized int addAudioTrack(MediaFormat mediaFormat) {
        trackFlag |= 1;
        Log.d(TAG, "addAudioTrack trackFlag: " + trackFlag);
        return mediaMuxer.addTrack(mediaFormat);
    }

    /** Registers the video track with the muxer and returns its track index. */
    private synchronized int addVideoTrack(MediaFormat mediaFormat) {
        trackFlag |= (1 << 1);
        Log.d(TAG, "addVideoTrack trackFlag : " + trackFlag);
        return mediaMuxer.addTrack(mediaFormat);
    }

    /** Writes one encoded sample; silently dropped until the muxer has started. */
    private void writeSampleData(int trackIndex, ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
        if (!isMuxerStart || null == byteBuffer) {
            return;
        }
        mediaMuxer.writeSampleData(trackIndex, byteBuffer, bufferInfo);
    }

    /**
     * Creates the output MP4 file and the frame queues, and prepares (but does
     * not start) the drain thread; tryStartProcess() starts it once both
     * encoders are initialized.
     */
    public void init() {
        try {
            Log.d(TAG, "init");
            // The original crashed in the MediaMuxer ctor when the output
            // directory did not exist yet.
            File dir = new File(MP4_PATH);
            if (!dir.exists() && !dir.mkdirs()) {
                Log.w(TAG, "init: cannot create " + MP4_PATH);
            }
            String mp4FileName = MP4_PATH + System.currentTimeMillis() + "_aicube.mp4";
            mediaMuxer = new MediaMuxer(mp4FileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            mPcmQueue = new LinkedList<>();
            mYUVQueue = new LinkedList<>();

            // Fake frame so the video encoder produces output and triggers
            // INFO_OUTPUT_FORMAT_CHANGED (a track must exist before the muxer
            // can start).
            mYUVQueue.addLast(new byte[1280 * 600 * 2]);

            isProcessing = true;
            mProcessThread = new ProcessThread();
        } catch (Exception e) {
            Log.e(TAG, "init failed", e);
        }
    }

    /** Starts the muxer once BOTH tracks (bits 0 and 1 of trackFlag) are registered. */
    public void startMuxer() {
        synchronized (this) {
            Log.d(TAG, "start trackFlag:" + trackFlag);
            if (isMuxerStart || trackFlag != (1 << 1 | 1)) {
                Log.w(TAG, "start: already isStart :[" + isMuxerStart + "] or trackFlag:[" + trackFlag + "]");
                return;
            }
            mediaMuxer.start();
            isMuxerStart = true;
        }
    }

    /** Stops and releases both encoders; idempotent and null-safe. */
    private synchronized void releaseCodecs() {
        if (mAudioCodec != null) {
            try {
                mAudioCodec.stop();
            } catch (Exception e) {
                Log.e(TAG, "audio codec stop failed", e);
            }
            mAudioCodec.release();
            mAudioCodec = null;
        }
        if (mVideoCodec != null) {
            try {
                mVideoCodec.stop();
            } catch (Exception e) {
                Log.e(TAG, "video codec stop failed", e);
            }
            mVideoCodec.release();
            mVideoCodec = null;
        }
    }

    /**
     * Stops processing, finalizes the MP4 (if the muxer ever started) and
     * releases every resource. Safe to call even if the drain thread never ran.
     */
    public void destroy() {
        Log.d(TAG, "destroy: +");
        try {
            if (isProcessing) {
                isProcessing = false;
                if (null != mProcessThread) {
                    mProcessThread.join();
                }
                mProcessThread = null;
            }

            // The drain thread releases the codecs on exit; this covers the
            // case where it never started (idempotent).
            releaseCodecs();

            if (null != mediaMuxer) {
                if (isMuxerStart) {
                    mediaMuxer.stop();
                }
                // Release even when never started — the original leaked it.
                mediaMuxer.release();
                mediaMuxer = null;
            }
            isMuxerStart = false;

            if (null != mPcmQueue) {
                mPcmQueue.clear();
                mPcmQueue = null;
            }
            if (null != mYUVQueue) {
                mYUVQueue.clear();
                mYUVQueue = null;
            }

            isAudioCodecInit = false;
            isVideoCodecInit = false;
            trackFlag = 0;
            instance = null;
        } catch (Exception e) {
            Log.e(TAG, "destroy failed", e);
        }
        Log.d(TAG, "destroy: -");
    }

    /** True once the audio track has been registered with the muxer. */
    public boolean isAudioReady() {
        return (trackFlag & 1) == 1;
    }

    /** True once the video track has been registered with the muxer. */
    public boolean isVideoReady() {
        return ((trackFlag >> 1) & 1) == 1;
    }
}
