package com.android.rockchip.camera2.view;
/*
 * AudioVideoRecordingSample
 * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
 *
 * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
 *
 * File name: CameraGLView.java
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 * All files in the folder are under this Apache License, Version 2.0.
 */

import android.app.Activity;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;


import com.android.rockchip.camera2.encoder.MediaVideoEncoder;
import com.android.rockchip.camera2.glutilsOld.GLDrawer2D;

import java.lang.ref.WeakReference;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * Sub class of GLSurfaceView to display camera preview and write video frame to capturing surface
 */
public final class CameraGLView extends GLSurfaceView {
    private static final boolean DEBUG = true; // TODO set false on release
    private static final String TAG = "CameraGLView";
    private static final int SCALE_STRETCH_FIT = 0;
    private static final int SCALE_KEEP_ASPECT_VIEWPORT = 1;
    private static final int SCALE_KEEP_ASPECT = 2;
    private static final int SCALE_CROP_CENTER = 3;

    private final CameraSurfaceRenderer mRenderer;
    private boolean mHasSurface;
    private int mVideoWidth, mVideoHeight;
    private int mRotation;
    private int mScaleMode = SCALE_STRETCH_FIT;
    private static Activity mActivity;


    /**
     * Convenience constructor for purely programmatic creation.
     * Delegates to the full constructor with no attributes and default style.
     */
    public CameraGLView(final Context context) {
        this(context, null, 0);
    }

    /**
     * Constructor used by XML layout inflation.
     * Delegates to the full constructor with the default style.
     */
    public CameraGLView(final Context context, final AttributeSet attrs) {
        this(context, attrs, 0);
    }

    /**
     * Full constructor: requests a GLES 2.0 context and installs the camera renderer.
     * NOTE: {@code GLSurfaceView} has no three-argument super constructor, so
     * {@code defStyle} is accepted only for constructor symmetry and is not forwarded.
     */
    public CameraGLView(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs);
        if (DEBUG) {
            Log.v(TAG, "CameraGLView:");
        }
        mRenderer = new CameraSurfaceRenderer(this);
        // GLES 2.0 must be requested before setRenderer() is called (API >= 8).
        setEGLContextClientVersion(2);
        setRenderer(mRenderer);
    }

    /**
     * Registers a callback that is notified (on the GL thread) when the GL
     * surface has been created.
     * @param listener callback to invoke, or null to clear
     */
    public void setCameraSurfaceListener(CameraSurfaceRenderer.CameraSurfaceLinstener listener) {
        mRenderer.setCameraSurfaceLinstener(listener);
    }

    /** Resumes the GL thread; must be called from the owning Activity's onResume. */
    @Override
    public void onResume() {
        if (DEBUG) {
            Log.v(TAG, "onResume:");
        }
        super.onResume();
    }

    /** Pauses the GL thread; must be called from the owning Activity's onPause. */
    @Override
    public void onPause() {
        if (DEBUG) {
            Log.v(TAG, "onPause:");
        }
        super.onPause();
    }

    /**
     * Selects how the camera image is fitted into this view and schedules a
     * viewport refresh on the GL thread. Does nothing if the mode is unchanged.
     * @param mode one of SCALE_STRETCH_FIT, SCALE_KEEP_ASPECT_VIEWPORT,
     *             SCALE_KEEP_ASPECT or SCALE_CROP_CENTER
     */
    public void setScaleMode(final int mode) {
        if (mScaleMode == mode) {
            return;
        }
        mScaleMode = mode;
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.updateViewport();
            }
        });
    }

    /** @return the currently active SCALE_* mode constant */
    public int getScaleMode() {
        return mScaleMode;
    }

    /**
     * Stores the incoming video frame size, swapping width and height when the
     * current rotation is 90 or 270 degrees, then refreshes the viewport on the
     * GL thread.
     * @param width  video frame width in pixels (before rotation)
     * @param height video frame height in pixels (before rotation)
     */
    public void setVideoSize(final int width, final int height) {
        final boolean upright = (mRotation % 180) == 0;
        mVideoWidth = upright ? width : height;
        mVideoHeight = upright ? height : width;
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.updateViewport();
            }
        });
    }

    /** @return the rotation-adjusted video width in pixels (0 until setVideoSize is called) */
    public int getVideoWidth() {
        return mVideoWidth;
    }

    /** @return the rotation-adjusted video height in pixels (0 until setVideoSize is called) */
    public int getVideoHeight() {
        return mVideoHeight;
    }

    /**
     * Returns the camera-facing SurfaceTexture, or null before the GL surface
     * has been created (it is allocated in the renderer's onSurfaceCreated).
     */
    public SurfaceTexture getSurfaceTexture() {
        if (DEBUG) {
            Log.v(TAG, "getSurfaceTexture:");
        }
        if (mRenderer == null) {
            return null;
        }
        return mRenderer.mSTexture;
    }

    /**
     * Releases the renderer's GL resources and clears the surface flag before
     * the underlying surface goes away.
     */
    @Override
    public void surfaceDestroyed(final SurfaceHolder holder) {
        if (DEBUG) {
            Log.v(TAG, "surfaceDestroyed:");
        }
        mHasSurface = false;
        mRenderer.onSurfaceDestroyed();
        super.surfaceDestroyed(holder);
    }

    /**
     * Attaches (or detaches, when null) the video encoder. The EGL context and
     * texture id are handed to the encoder on the GL thread, synchronized on the
     * renderer so onDrawFrame never sees a half-initialized encoder.
     * @param encoder encoder to receive frames, or null to stop forwarding
     */
    public void setVideoEncoder(final MediaVideoEncoder encoder) {
        if (DEBUG) Log.v(TAG, "setVideoEncoder:tex_id=" + mRenderer.hTex + ",encoder=" + encoder);
        queueEvent(new Runnable() {
            @Override
            public void run() {
                synchronized (mRenderer) {
                    if (encoder != null) {
                        // Share this view's EGL context so the encoder can sample hTex.
                        encoder.setEglContext(EGL14.eglGetCurrentContext(), mRenderer.hTex);
                    }
                    mRenderer.mVideoEncoder = encoder;
                }
            }
        });
    }

    //********************************************************************************
//********************************************************************************
//    private synchronized void startPreview(final int width, final int height) {
//        if (mCameraHandler == null) {
//            final CameraThread thread = new CameraThread(this);
//            thread.start();
//            mCameraHandler = thread.getHandler();
//        }
//        mCameraHandler.startPreview(1280, 720/*width, height*/);
//    }

    /**
     * Renderer for the GLSurfaceView: draws the camera SurfaceTexture to the
     * preview and forwards frame notifications to the video encoder.
     */
    public static final class CameraSurfaceRenderer
            implements Renderer,
            SurfaceTexture.OnFrameAvailableListener {    // API >= 11
        private final WeakReference<CameraGLView> mWeakParent;
        private SurfaceTexture mSTexture;    // API >= 11
        private int hTex;
        private GLDrawer2D mDrawer;
        private final float[] mStMatrix = new float[16];
        private final float[] mMvpMatrix = new float[16];
        private MediaVideoEncoder mVideoEncoder;
        /**
         * Callback fired from {@link #onSurfaceCreated} on the GL thread once the
         * GL surface exists. NOTE: "Linstener" is a typo, kept because the name is
         * public API used by callers.
         */
        public interface CameraSurfaceLinstener{
            void onSurfaceCreated();
        }
        CameraSurfaceLinstener mCameraSurfaceLinstener;

        /**
         * Registers the listener invoked from onSurfaceCreated on the GL thread.
         * @param listener callback to invoke, or null to clear
         */
        public void setCameraSurfaceLinstener(CameraSurfaceLinstener listener){
            this.mCameraSurfaceLinstener = listener;
        }

        /**
         * @param parent owning view; held weakly so the renderer does not keep
         *               the view (and its Activity) alive
         */
        public CameraSurfaceRenderer(final CameraGLView parent) {
            if (DEBUG) {
                Log.v(TAG, "CameraSurfaceRenderer:");
            }
            mWeakParent = new WeakReference<CameraGLView>(parent);
            // Start with an identity MVP matrix; updateViewport() adjusts it later.
            Matrix.setIdentityM(mMvpMatrix, 0);
        }

        /**
         * Creates the external-OES texture, the SurfaceTexture wrapping it and the
         * preview drawer. Runs on the GL thread.
         *
         * Fix: guard against {@code glGetString} returning null (possible on some
         * drivers / when the context is not fully current), which previously threw
         * an NPE on {@code contains()} instead of the intended RuntimeException.
         *
         * @throws RuntimeException if OES_EGL_image_external is unsupported
         */
        @Override
        public void onSurfaceCreated(final GL10 unused, final EGLConfig config) {
            if (DEBUG) Log.v(TAG, "onSurfaceCreated:");
            // NOTE(review): this callback fires before mSTexture is created below,
            // so listeners must not expect getSurfaceTexture() to be non-null yet.
            if(mCameraSurfaceLinstener != null){
                mCameraSurfaceLinstener.onSurfaceCreated();
            }
            // This renderer requires the OES_EGL_image_external extension.
            final String extensions = GLES20.glGetString(GLES20.GL_EXTENSIONS);    // API >= 8
            if ((extensions == null) || !extensions.contains("OES_EGL_image_external"))
                throw new RuntimeException("This system does not support OES_EGL_image_external.");
            // create texture ID
            hTex = GLDrawer2D.initTex();
            // create SurfaceTexture with texture ID; the camera renders into it.
            mSTexture = new SurfaceTexture(hTex);
            mSTexture.setOnFrameAvailableListener(this);
            // clear screen with yellow color so that you can see rendering rectangle
            GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
            final CameraGLView parent = mWeakParent.get();
            if (parent != null) {
                parent.mHasSurface = true;
            }
            // create object for preview display
            mDrawer = new GLDrawer2D();
            mDrawer.setMatrix(mMvpMatrix, 0);
        }

        /**
         * Recomputes the viewport when the surface size changes.
         *
         * Fix: removed a dead local ({@code parent} was fetched from the weak
         * reference and never used).
         */
        @Override
        public void onSurfaceChanged(final GL10 unused, final int width, final int height) {
            if (DEBUG) Log.v(TAG, String.format("onSurfaceChanged:(%d,%d)", width, height));
            // If either dimension is zero, initialization of this view is still in progress.
            if ((width == 0) || (height == 0)) return;
            updateViewport();
        }

        /**
         * when GLSurface context is soon destroyed
         */
        public void onSurfaceDestroyed() {
            if (DEBUG) Log.v(TAG, "onSurfaceDestroyed:");
            if (mDrawer != null) {
                mDrawer.release();
                mDrawer = null;
            }
            if (mSTexture != null) {
                mSTexture.release();
                mSTexture = null;
            }
            GLDrawer2D.deleteTex(hTex);
        }

        /**
         * Recomputes the GL viewport and the model-view-projection matrix so the
         * video frame is mapped into the view according to the parent's scale mode.
         * Must run on the GL thread (it issues GL calls). No-op when the parent
         * view has been collected or the video size is not yet known.
         */
        private final void updateViewport() {
            final CameraGLView parent = mWeakParent.get();
            if (parent != null) {
                final int view_width = parent.getWidth();
                final int view_height = parent.getHeight();
                // Reset to the full-view viewport and clear before applying the mode.
                GLES20.glViewport(0, 0, view_width, view_height);
                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
                final double video_width = parent.mVideoWidth;
                final double video_height = parent.mVideoHeight;
                if (video_width == 0 || video_height == 0) return;
                Matrix.setIdentityM(mMvpMatrix, 0);
                final double view_aspect = view_width / (double) view_height;
                Log.i(TAG, String.format("view(%d,%d)%f,video(%1.0f,%1.0f)", view_width, view_height, view_aspect, video_width, video_height));
                switch (parent.mScaleMode) {
                    case SCALE_STRETCH_FIT:
                        // Identity matrix + full viewport: stretch to fill, ignoring aspect.
                        break;
                    case SCALE_KEEP_ASPECT_VIEWPORT: {
                        // Letterbox/pillarbox by shrinking the viewport itself.
                        final double req = video_width / video_height;
                        int x, y;
                        int width, height;
                        if (view_aspect > req) {
                            // if view is wider than camera image, calc width of drawing area based on view height
                            y = 0;
                            height = view_height;
                            width = (int) (req * view_height);
                            x = (view_width - width) / 2;
                        } else {
                            // if view is higher than camera image, calc height of drawing area based on view width
                            x = 0;
                            width = view_width;
                            height = (int) (view_width / req);
                            y = (view_height - height) / 2;
                        }
                        // set viewport to draw keeping aspect ration of camera image
                        if (DEBUG)
                            Log.v(TAG, String.format("xy(%d,%d),size(%d,%d)", x, y, width, height));
                        GLES20.glViewport(x, y, width, height);
                        break;
                    }
                    case SCALE_KEEP_ASPECT:
                    case SCALE_CROP_CENTER: {
                        // Keep the full viewport and scale via the MVP matrix instead:
                        // KEEP_ASPECT takes the smaller scale (fit, may letterbox),
                        // CROP_CENTER the larger one (fill, may crop edges).
                        final double scale_x = view_width / video_width;
                        final double scale_y = view_height / video_height;
                        final double scale = (parent.mScaleMode == SCALE_CROP_CENTER
                                ? Math.max(scale_x, scale_y) : Math.min(scale_x, scale_y));
                        final double width = scale * video_width;
                        final double height = scale * video_height;
                        Log.v(TAG, String.format("size(%1.0f,%1.0f),scale(%f,%f),mat(%f,%f)",
                                width, height, scale_x, scale_y, width / view_width, height / view_height));
                        Matrix.scaleM(mMvpMatrix, 0, (float) (width / view_width), (float) (height / view_height), 1.0f);
                        break;
                    }
                }
                // Push the (possibly updated) matrix to the drawer if it exists yet.
                if (mDrawer != null)
                    mDrawer.setMatrix(mMvpMatrix, 0);
            }
        }

        private volatile boolean requesrUpdateTex = false;
        private boolean flip = true;

        /**
         * drawing to GLSurface
         * we set renderMode to GLSurfaceView.RENDERMODE_WHEN_DIRTY,
         * this method is only called when #requestRender is called(= when texture is required to update)
         * if you don't set RENDERMODE_WHEN_DIRTY, this method is called at maximum 60fps
         */
        @Override
        public void onDrawFrame(final GL10 unused) {
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            if (requesrUpdateTex && mSTexture != null) {
                requesrUpdateTex = false;
                // update texture(came from camera)
                mSTexture.updateTexImage();
                // get texture matrix
                mSTexture.getTransformMatrix(mStMatrix);
            }
            // draw to preview screen
            try{
                mDrawer.draw(hTex, mStMatrix);
                flip = !flip;
                if (flip) {    // ~30fps
                    synchronized (this) {
                        if (mVideoEncoder != null) {
                            // notify to capturing thread that the camera frame is available.
//						mVideoEncoder.frameAvailableSoon(mStMatrix);
                            mVideoEncoder.frameAvailableSoon(mStMatrix, mMvpMatrix);
                        }
                    }
                }
            }catch (Exception e){
                e.printStackTrace();
            }
        }

        /**
         * Called by the SurfaceTexture (possibly on an arbitrary thread) when a new
         * camera frame is ready; only flags the GL thread to latch it in onDrawFrame.
         * NOTE(review): no requestRender() here — rendering appears to rely on the
         * default continuous render mode; confirm before switching to WHEN_DIRTY.
         */
        @Override
        public void onFrameAvailable(final SurfaceTexture st) {
            requesrUpdateTex = true;
        }
    }
}
