package tq.com.easysurfacecapture;

import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.util.Log;
import android.view.ViewTreeObserver;

import org.bytedeco.javacv.AndroidFrameConverter;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameFilter;
import org.bytedeco.javacv.FrameRecorder;

import java.io.File;
import java.nio.ShortBuffer;

import tq.com.tqcom.util.ImgUtils;

public class TQOpencvRecord {
    // Captures the pixels of a TQCaputreView plus microphone audio and encodes
    // them into an MP4 file via JavaCV's FFmpegFrameRecorder. Video frames are
    // pulled on one worker thread, PCM audio on another.
    private static final String LOG_TAG = "TQOpencvRecord";

    private TQCaputreView caputreView;
    private boolean isStart = false;

    private FFmpegFrameRecorder mRecorder;

    // Encoder parameters. Width/height are replaced with the view's layout
    // size once it is known (see initRecord(TQCaputreView)).
    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private int viewWidth = 0;
    private int viewHeight = 0;

    private boolean recording = false;
    private long startTime = 0;          // wall-clock ms at mRecorder.start()

    // Written by stopRecording(), read by both worker threads.
    private volatile boolean isStop = false;

    private ImageCaptureThread imageCaptureThread = null;

    /* audio data getting thread */
    private AudioRecord audioRecord = null;
    private AudioThread audioThread = null;

    /**
     * Binds the view whose contents will be recorded and follows its layout so
     * the encoder dimensions match the view, capped at 640 px wide. Both
     * dimensions are forced even because yuv420p-based encoders reject odd
     * widths/heights.
     */
    public void initRecord(TQCaputreView view){
        caputreView = view;

        caputreView.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
            @Override
            public void onGlobalLayout() {
                if(!isStart){
                    viewWidth = caputreView.getWidth();
                    viewHeight = caputreView.getHeight();

                    imageWidth = viewWidth;
                    imageHeight = viewHeight;
                    if(imageWidth > 640){
                        // Scale proportionally so the video is at most 640 wide.
                        imageHeight = (int)(640.f/imageWidth * imageHeight);
                        imageWidth = 640;
                    }

                    // Round both axes up to even values (the original fixed
                    // only the width, which still fails for odd heights).
                    if(imageWidth % 2 == 1){
                        imageWidth += 1;
                    }
                    if(imageHeight % 2 == 1){
                        imageHeight += 1;
                    }
                }
            }
        });
    }

    /**
     * (Re)creates the FFmpeg recorder targeting {@code file}: MP4 container,
     * one audio channel at 44.1 kHz, 30 fps video.
     */
    private void initRecord(String file){
        if(mRecorder != null){
            this.release();
        }

        File outFile = new File(file);
        // Remove any stale file at the target path. BUGFIX: the previous code
        // called deleteOnExit() here, which scheduled the *finished recording*
        // itself for deletion when the process exits.
        if(outFile.exists()){
            outFile.delete();
        }

        mRecorder = new FFmpegFrameRecorder(outFile, imageWidth, imageHeight, 1);
        mRecorder.setFormat("mp4");
        mRecorder.setSampleRate(sampleAudioRateInHz);
        mRecorder.setFrameRate(frameRate);

        isStop = false;
    }

    /** @return true while the capture threads are actively recording. */
    public boolean isRecording(){return recording;}

    /**
     * Starts the recorder and both capture threads.
     *
     * @param file absolute path of the MP4 file to write
     */
    public void startRecording(String file) {
        initRecord(file);

        try {
            mRecorder.start();
            startTime = System.currentTimeMillis();
            recording = true;

            // Video frames.
            imageCaptureThread = new ImageCaptureThread();
            imageCaptureThread.start();

            // Microphone PCM.
            audioThread = new AudioThread();
            audioThread.start();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    /** Signals both worker threads to stop, waits for them, then closes the container. */
    public void stopRecording(){
        isStop = true;
        recording = false;

        try {
            if(imageCaptureThread != null){
                imageCaptureThread.join();
            }
        }catch (InterruptedException e){
            Thread.currentThread().interrupt();  // preserve interrupt status
            e.printStackTrace();
        }

        try {
            if(audioThread != null){
                audioThread.join();
            }
        }catch (InterruptedException e){
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }

        try {
            // Guard: stopRecording() can be reached (via release()) before the
            // recorder was ever created.
            if(mRecorder != null){
                mRecorder.stop();
            }
        }catch (FrameRecorder.Exception e){
            e.printStackTrace();
        }
    }

    /** Stops recording if still running, then releases native recorder resources. */
    public void release(){
        if(!isStop){
            stopRecording();
        }
        try {
            if(mRecorder != null){
                mRecorder.release();
            }
        }catch (FrameRecorder.Exception e){
            e.printStackTrace();
        }
    }

    /** Snapshot of the bound view's current contents, or null if none is available yet. */
    private Bitmap getBitMap(){
        // The unused scaling Matrix and the commented-out createBitmap path
        // were removed; this is a plain pass-through to the view.
        return caputreView.drawImage();
    }

    class ImageCaptureThread extends Thread{
        @Override
        public void run() {
            // One converter for the whole session instead of one per frame.
            AndroidFrameConverter converter = new AndroidFrameConverter();

            while (!isStop){
                Bitmap bitmap = getBitMap();
                if(bitmap == null){
                    // Back off briefly instead of busy-spinning on a core.
                    try {
                        Thread.sleep(5);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        return;
                    }
                    continue;
                }

                Frame frame = converter.convert(bitmap);
                bitmap.recycle();

                try {
                    // Recorder timestamps are microseconds; keep them monotonic
                    // and aligned with elapsed wall-clock time.
                    long t = 1000 * (System.currentTimeMillis() - startTime);
                    if (t > mRecorder.getTimestamp()) {
                        mRecorder.setTimestamp(t);
                    }

                    mRecorder.record(frame);
                }catch (FFmpegFrameRecorder.Exception e){
                    e.printStackTrace();
                }
            }
        }
    }

    //---------------------------------------------
    // audio thread, gets and encodes audio data
    //---------------------------------------------
    class AudioThread extends Thread {

        @Override
        public void run() {
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            int bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

            ShortBuffer audioData = ShortBuffer.allocate(bufferSize);

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            /* ffmpeg_audio encoding loop */
            while (!isStop) {
                int bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
                // BUGFIX: read() returns a negative error code on failure; only
                // touch position/limit after a successful read (limit(negative)
                // throws IllegalArgumentException).
                if (bufferReadResult > 0) {
                    Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
                    if (recording) {
                        // BUGFIX: recordSamples() consumes [position, limit)
                        // and advances position — without rewinding each chunk,
                        // every chunk after the first appears empty.
                        audioData.position(0);
                        audioData.limit(bufferReadResult);
                        try {
                            mRecorder.recordSamples(audioData);
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, String.valueOf(e.getMessage()));  // getMessage() may be null
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");

            /* encoding finish, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                Log.v(LOG_TAG,"audioRecord released");
            }
        }
    }

}
