package com.codecrecord;

import android.text.TextUtils;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.TimeUnit;

/**
 * Records camera preview frames (NV21) into a video file via MediaCodec,
 * optionally muxing microphone audio alongside the video track.
 */

public class CameraCodecRecorder {
    private static final boolean DEBUG = true;

    private static final String TAG = "LogUtils-CodecRecorder";
    private static final String TAG_THREAD = "LogUtils-CameraThread";
    private static final boolean DEBUG_RECORDER_TIME_LOG = false;

    /** Maximum number of preview frames buffered before new frames are dropped. */
    private static final int MAX_QUEUE_SIZE = 5;

    private final Fps mFps = new Fps();
    private final Object mSync = new Object();
    private final Set<ICodecRecordCallback> mCallbacks = new CopyOnWriteArraySet<>();
    // Decouples the camera preview callback from the (slower) color conversion + encode.
    private final ArrayBlockingQueue<byte[]> frameQueue = new ArrayBlockingQueue<>(MAX_QUEUE_SIZE);

    // Written by the encoder callback thread, read by CoverThread -> volatile for visibility.
    private volatile boolean mIsRecording;
    private int mWidth, mHeight;

    // Reusable NV12 scratch buffer (width * height * 3 / 2 bytes), allocated per recording.
    private byte[] mCoverBuffer;
    private String mVideoPath;
    private boolean mRecordVoice;
    private int dropCount = 0;

    private final ICodecRecordCallback mCameraManger;

    /** Muxer for audio/video recording; non-null only while a recording is active. */
    private MediaMuxerWrapper mMuxer;
    private MediaVideoBufferEncoder mVideoEncoder;
    private CoverThread coverThread;

    /**
     * @param camera callback notified when a finished video file becomes available
     */
    public CameraCodecRecorder(final ICodecRecordCallback camera) {
        mCameraManger = camera;
    }

    /** Frame width in pixels; must be set before {@link #handleStartRecording()}. */
    public void setWidth(int mWidth) {
        this.mWidth = mWidth;
    }

    /** Frame height in pixels; must be set before {@link #handleStartRecording()}. */
    public void setHeight(int mHeight) {
        this.mHeight = mHeight;
    }

    /** Output file path for the next recording. */
    public void setVideoPath(String mVideoPath) {
        this.mVideoPath = mVideoPath;
    }

    /** Whether to capture microphone audio alongside the video. */
    public void setRecordVoice(boolean recordVoice) {
        this.mRecordVoice = recordVoice;
    }

    /** @return true while a recording session (muxer) is active */
    public boolean isRecording() {
        synchronized (mSync) {
            return (mMuxer != null);
        }
    }

    public void handleClose() {
        if (DEBUG) Log.v(TAG_THREAD, "handleClose:");
        handleStopRecording();
    }

    //1 you should check camera is previewing now, if not, should not start record
    //2 you should call stop record when camera stop preview or close
    //3 you should call onFrame when camera preview get new frame

    /**
     * Starts a new recording session: creates the muxer, video (and optionally
     * audio) encoders, and the drain thread.
     *
     * @return 0 on success; -1 already recording; -2 no output path;
     *         -3 invalid dimensions; -4 muxer/encoder I/O failure
     */
    public int handleStartRecording() {
        if (DEBUG) Log.v(TAG_THREAD, "handleStartRecording:");
        try {
            synchronized (mSync) {
                if (mMuxer != null) return -1;

                if (TextUtils.isEmpty(mVideoPath)) {
                    return -2;
                }

                if (mWidth <= 0 || mHeight <= 0) {
                    return -3;
                }
            }

            final MediaMuxerWrapper muxer = new MediaMuxerWrapper(mVideoPath);    // if you record audio only, ".m4a" is also OK.
            final MediaVideoBufferEncoder videoEncoder =
                    new MediaVideoBufferEncoder(muxer, mWidth, mHeight, mMediaEncoderListener);
            synchronized (mSync) {
                if (mRecordVoice) {
                    // The encoder registers itself with the muxer in its constructor.
                    new MediaAudioEncoder(muxer, mMediaEncoderListener);
                }
            }
            muxer.prepare();
            muxer.startRecording();
            synchronized (mSync) {
                mMuxer = muxer;
                mVideoEncoder = videoEncoder;
                // NV12 frame size: full-resolution Y plane + half-resolution interleaved UV.
                mCoverBuffer = new byte[mWidth * mHeight * 3 / 2];
                coverThread = new CoverThread();
                coverThread.start();
            }
        } catch (final IOException e) {
            Log.e(TAG, "startCapture:", e);
            return -4;
        }
        return 0;
    }

    /**
     * Stops the current recording session (no-op when idle). Wakes the drain
     * thread so it exits promptly instead of waiting out its poll timeout.
     */
    public void handleStopRecording() {
        if (DEBUG) Log.v(TAG_THREAD, "handleStopRecording:mMuxer=" + mMuxer);
        final MediaMuxerWrapper muxer;
        final CoverThread drain;
        synchronized (mSync) {
            muxer = mMuxer;
            drain = coverThread;
            mMuxer = null;
            mVideoEncoder = null;
            coverThread = null;
        }
        if (drain != null) {
            drain.interrupt();
        }
        if (muxer != null) {
            muxer.stopRecording();
        }
    }

    /**
     * Feeds one converted (NV12) frame to the video encoder.
     *
     * @return false when no encoder is active (signals the drain thread to exit)
     */
    private boolean onFrame(final byte[] frame) {
        final MediaVideoBufferEncoder videoEncoder;
        boolean ret = true;
        synchronized (mSync) {
            videoEncoder = mVideoEncoder;
        }
        if (videoEncoder != null) {
            if (mIsRecording) {
                videoEncoder.frameAvailableSoon();
                videoEncoder.encode(frame);
            }
        } else {
            ret = false;
        }
        mFps.step();
        return ret;
    }

    /** Stops any active recording and clears registered callbacks. */
    public void handleRelease() {
        if (DEBUG) Log.v(TAG_THREAD, "handleRelease:mIsRecording=" + mIsRecording);
        handleClose();
        mCallbacks.clear();
        if (DEBUG) Log.v(TAG_THREAD, "handleRelease:finished");
    }

    private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
        @Override
        public void onPrepared(final MediaEncoder encoder) {
            if (DEBUG) Log.v(TAG, "onPrepared:encoder=" + encoder);
            mIsRecording = true;
        }

        @Override
        public void onStopped(final MediaEncoder encoder) {
            if (DEBUG) Log.v(TAG_THREAD, "onStopped:encoder=" + encoder);
            try {
                mIsRecording = false;
                final String path = encoder.getOutputPath();
                if (!TextUtils.isEmpty(path)) {
                    mCameraManger.onNewVideo(path);
                }
            } catch (final Exception e) {
                // Was mislabeled "onPrepared:" — this is the stop path.
                Log.e(TAG, "onStopped:", e);
            }
        }
    };

    /**
     * Enqueues a raw NV21 preview frame for encoding; drops it (with a log)
     * when the queue is full. Called from the camera preview thread.
     */
    public void addFrame(byte[] frame) {
        if (isRecording()) {
            // offer() is atomic; the old size()-then-add() pair could throw
            // IllegalStateException if the queue filled between the two calls.
            if (!frameQueue.offer(frame)) {
                Log.d(TAG, "drop frame sum: " + dropCount++);
            }
        }
    }

    /**
     * Drains queued NV21 frames, converts each to NV12 in-place into
     * {@code mCoverBuffer}, and hands it to the encoder. Exits when the
     * encoder goes away or the thread is interrupted.
     */
    private class CoverThread extends Thread {
        @Override
        public void run() {
            dropCount = 0;
            frameQueue.clear();
            while (true) {
                try {
                    long cur = 0, cur2 = 0, cur3 = 0, cur4 = 0;
                    if (DEBUG_RECORDER_TIME_LOG) {
                        cur = System.currentTimeMillis();
                    }
                    // Short timeout so the null-encoder exit check still runs
                    // even when the camera stops delivering frames.
                    byte[] data = frameQueue.poll(10, TimeUnit.MILLISECONDS);
                    if (DEBUG_RECORDER_TIME_LOG) {
                        cur2 = System.currentTimeMillis();
                        Log.d(TAG, "poll time is " + (cur2 - cur));
                    }
                    if (data != null) {
                        NV21ToNV12(data, mCoverBuffer, mWidth, mHeight);
                        if (DEBUG_RECORDER_TIME_LOG) {
                            cur3 = System.currentTimeMillis();
                            Log.d(TAG, "NV21ToNV12 time is " + (cur3 - cur2));
                        }
                        if (!onFrame(mCoverBuffer)) {
                            Log.d(TAG, "CoverThread exit");
                            return;
                        }
                        if (DEBUG_RECORDER_TIME_LOG) {
                            cur4 = System.currentTimeMillis();
                            Log.d(TAG, "onFrame time is " + (cur4 - cur3));
                        }
                    }
                } catch (InterruptedException e) {
                    // Restore the interrupt status and exit. The old code
                    // swallowed the interrupt and looped, which made the
                    // thread spin re-throwing from poll() forever.
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }

    /**
     * Converts an NV21 frame to NV12. The Y plane is identical in both
     * layouts; the interleaved chroma plane swaps V,U (NV21) to U,V (NV12).
     *
     * <p>The previous version wrote {@code nv12[framesize - 1]} at {@code j == 0}
     * (clobbering the last luma byte) and read {@code nv21[framesize - 1]}
     * (a luma byte) as chroma — classic off-by-one in both loops.
     */
    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
        if (nv21 == null || nv12 == null) return;
        final int framesize = width * height;
        System.arraycopy(nv21, 0, nv12, 0, framesize);
        for (int j = 0; j < framesize / 2; j += 2) {
            nv12[framesize + j] = nv21[framesize + j + 1];     // U
            nv12[framesize + j + 1] = nv21[framesize + j];     // V
        }
    }

    /**
     * De-interleaves an NV21 frame into planar I420 (Y, then U plane, then V
     * plane) inside {@code output}.
     *
     * @param data   source NV21 buffer (Y plane followed by interleaved V,U)
     * @param output destination buffer, at least {@code width * height * 3 / 2} bytes
     * @return {@code output}, for call chaining
     */
    public static byte[] nv21ToI420(byte[] data, byte[] output, int width,
                                    int height) {
        byte[] ret = output;
        int total = width * height;

        ByteBuffer bufferY = ByteBuffer.wrap(ret, 0, total);
        ByteBuffer bufferU = ByteBuffer.wrap(ret, total, total / 4);
        ByteBuffer bufferV = ByteBuffer.wrap(ret, total + total / 4, total / 4);

        bufferY.put(data, 0, total);
        // NV21 chroma order is V,U per pair.
        for (int i = total; i < data.length; i += 2) {
            bufferV.put(data[i]);
            bufferU.put(data[i + 1]);
        }

        return ret;
    }
}
