package com.renovavision.videocodec.encoder;

import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.view.Surface;

import com.renovavision.videocodec.VideoCodecConstants;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Created by Alexandr Golovach on 12.05.16.
 */

/**
 * Encodes video frames drawn onto a {@link SurfaceTexture} using a hardware
 * {@link MediaCodec} encoder fed through an input {@link Surface}.
 *
 * <p>Lifecycle: construct with a GL texture id, optionally {@link #config} the
 * encoder parameters, then {@link #start()}. Encoded samples are delivered via
 * {@link #onEncodedSample}. Call {@link #stop()} to signal end-of-stream and
 * release the codec.
 *
 * <p>Thread-safety: {@code start()}/{@code stop()} are expected to be called
 * from a single controlling thread; encoding runs on an internal worker thread.
 */
public class TextureEncoder {

    private static final String TAG = TextureEncoder.class.getSimpleName();

    // Background worker that owns the MediaCodec; null when not encoding.
    private Worker mWorker;

    // video width in pixels
    private int mWidth;

    // video height in pixels
    private int mHeight;

    // OpenGL texture id backing mSurfaceTexture
    int mTextureID;
    // key frame (I-frame) interval, in seconds
    int mFrameInterval = 2;
    // target bitrate, bits per second
    int mVideoBitrate = 1 * 100 * 1000;
    // target frame rate, frames per second
    int mFPS = 10;

    private SurfaceTexture mSurfaceTexture;

    /**
     * Overrides the default encoder parameters.
     *
     * <p>Must be called before {@link #start()}; a worker that is already
     * running keeps the values it was configured with.
     *
     * @param frameInterval key frame interval in seconds
     * @param videoBitrate  target bitrate in bits per second
     * @param width         frame width in pixels
     * @param height        frame height in pixels
     * @param fps           target frame rate
     */
    public void config(int frameInterval, int videoBitrate, int width, int height, int fps) {
        this.mFrameInterval = frameInterval;
        this.mVideoBitrate = videoBitrate;
        this.mWidth = width;
        this.mHeight = height;
        this.mFPS = fps;
    }

    /**
     * @param texID  OpenGL texture name the producer renders into
     * @param width  frame width in pixels
     * @param height frame height in pixels
     */
    public TextureEncoder(int texID, int width, int height) {
        this.mSurfaceTexture = new SurfaceTexture(texID);
        // keep the texture id for reference (was previously never assigned)
        this.mTextureID = texID;

        this.mWidth = width;
        this.mHeight = height;
    }

    /** Called on the worker thread once the codec input surface exists. */
    protected void onSurfaceCreated(Surface surface) {

    }

    /** Called on the worker thread just before the input surface is released. */
    protected void onSurfaceDestroyed(Surface surface) {

    }

    /**
     * Called on the worker thread for every encoded sample (including
     * codec-config buffers such as SPS/PPS). {@code data} is only valid for
     * the duration of the call — copy it if it must outlive the callback.
     */
    protected void onEncodedSample(MediaCodec.BufferInfo info, ByteBuffer data) {

    }

    /** Starts encoding on a background worker; no-op if already started. */
    public void start() {
        if (mWorker == null) {
            mWorker = new Worker();
            mWorker.setIsRunning(true);
            mWorker.start();
        }
    }

    /**
     * Signals end-of-stream and waits (bounded) for the worker to drain and
     * release the codec, so an immediate {@link #start()} cannot race with
     * the previous codec instance. No-op if not started.
     */
    public void stop() {
        Worker worker = mWorker;
        if (worker != null) {
            mWorker = null;
            worker.setIsRunning(false);
            // never join from the worker itself — that would deadlock
            if (Thread.currentThread() != worker) {
                try {
                    // bounded wait: the drain loop uses a 10 ms dequeue timeout,
                    // so a healthy codec finishes well within this window
                    worker.join(3000L);
                } catch (InterruptedException e) {
                    // preserve the caller's interrupt status
                    Thread.currentThread().interrupt();
                }
            }
        }
    }


    // background thread which prepares MediaCodec and encodes using the surface-to-buffer method
    protected class Worker extends Thread {

        // reused metadata holder for dequeued output buffers
        private final MediaCodec.BufferInfo mBufferInfo;

        // video codec which gets access to the hardware encoder
        private MediaCodec mCodec;

        // indicator for the inner loop
        @NonNull
        private final AtomicBoolean mIsRunning = new AtomicBoolean(false);

        private Surface mSurface;

        // dequeueOutputBuffer timeout, in microseconds (10 ms)
        private final long mTimeoutUsec;

        // set once end-of-stream has been signalled; signalling twice throws
        // IllegalStateException on many codec implementations
        private boolean mEosSignalled;

        public Worker() {
            this.mBufferInfo = new MediaCodec.BufferInfo();
            this.mTimeoutUsec = 10000L;
        }

        public void setIsRunning(boolean running) {
            mIsRunning.set(running);
        }

        @NonNull
        public AtomicBoolean isRunning() {
            return mIsRunning;
        }

        @RequiresApi(api = Build.VERSION_CODES.M)
        @Override
        public void run() {
            try {
                // prepare inside the try so that a failure after the codec is
                // created (e.g. configure() throwing) still reaches release()
                prepare();

                while (mIsRunning.get()) {
                    // encode video sources from the input surface
                    encode();
                }

                // final pass: signals end-of-stream and drains remaining samples
                encode();
            } finally {
                // release video codec resources
                release();
            }
        }

        void encode() {
            if (!mIsRunning.get() && !mEosSignalled) {
                // not running anymore — complete the stream exactly once
                mCodec.signalEndOfInputStream();
                mEosSignalled = true;
            }

            // pre-Lollipop api
            // NOTE(review): effectively dead — prepare() requires API 23
            // (setInputSurface), so this branch cannot be reached on < 21;
            // kept to preserve the original behavior.
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {

                // get output buffers
                ByteBuffer[] outputBuffers = mCodec.getOutputBuffers();
                for (; ; ) {
                    long time = System.currentTimeMillis();
                    // get status
                    int status = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeoutUsec);
                    if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output yet — keep polling while running
                        if (!mIsRunning.get()) {
                            break;
                        }
                    } else if (status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        // need to fetch the new output buffer array
                        outputBuffers = mCodec.getOutputBuffers();
                    } else if (status >= 0) {

                        // encoded sample
                        ByteBuffer data = outputBuffers[status];
                        data.position(mBufferInfo.offset);
                        data.limit(mBufferInfo.offset + mBufferInfo.size);

                        final int endOfStream = mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                        if (endOfStream == 0) {
                            onEncodedSample(mBufferInfo, data);
                        }
                        // releasing the buffer back to the codec is important
                        mCodec.releaseOutputBuffer(status, false);

                        // stream fully drained — finish
                        if (endOfStream == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                            break;
                        }
                    }
                    Log.d("streaming", "encode time:" + (System.currentTimeMillis() - time));
                }
            } else {
                for (; ; ) {
                    long time = System.currentTimeMillis();
                    // get status
                    int status = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeoutUsec);
                    if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output yet — keep polling while running
                        if (!mIsRunning.get()) {
                            break;
                        }
                    } else if (status >= 0) {
                        // encoded sample
                        ByteBuffer data = mCodec.getOutputBuffer(status);
                        if (data != null) {

                            final int endOfStream = mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                            if (endOfStream == 0) {
                                onEncodedSample(mBufferInfo, data);
                            }
                            // release buffer back to the codec
                            mCodec.releaseOutputBuffer(status, false);

                            // stream fully drained — finish
                            if (endOfStream == MediaCodec.BUFFER_FLAG_END_OF_STREAM) {
                                break;
                            }
                        }
                    }
                    Log.d("streaming", "encode time:" + (System.currentTimeMillis() - time));
                }
            }
        }

        // release all resources; null-guarded so it is safe even when
        // prepare() failed part-way through
        private void release() {
            if (mSurface != null) {
                onSurfaceDestroyed(mSurface);
            }

            if (mCodec != null) {
                mCodec.stop();
                mCodec.release();
                mCodec = null;
            }
            if (mSurface != null) {
                mSurface.release();
                mSurface = null;
            }
        }

        private MediaFormat getOutputFormat() {
            return mCodec.getOutputFormat();
        }

        @RequiresApi(api = Build.VERSION_CODES.M)
        private void prepare() {
            // configure video output
            MediaFormat format = MediaFormat.createVideoFormat(VideoCodecConstants.VIDEO_CODEC, mWidth, mHeight);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, mFPS);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mFrameInterval);

            try {
                mCodec = MediaCodec.createEncoderByType(VideoCodecConstants.VIDEO_CODEC);
            } catch (IOException e) {
                // cannot create the codec — propagate with the cause preserved
                throw new RuntimeException(e);
            }
            mCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

            // feed the codec from the caller-supplied texture via a Surface
            // (requires API 23: MediaCodec.setInputSurface)
            mSurface = new Surface(mSurfaceTexture);
            mCodec.setInputSurface(mSurface);

            // notify codec to start watching the surface and encoding samples
            mCodec.start();

            onSurfaceCreated(mSurface);
        }
    }
}
