package com.mlethe.library.camera.widget;

import android.app.Activity;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.os.Handler;
import android.util.AttributeSet;

import androidx.annotation.RequiresApi;

import com.mlethe.library.camera.Camera2Interface;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * A {@link GLSurfaceView} that renders Camera2 preview frames through an
 * OES (external) texture. The view acts as its own {@link GLSurfaceView.Renderer},
 * listens for new camera frames via {@link SurfaceTexture.OnFrameAvailableListener},
 * and forwards lifecycle events (resume/pause/detach) to {@link Camera2Interface}.
 *
 * <p>Rendering uses {@code RENDERMODE_WHEN_DIRTY}: frames are drawn only when the
 * camera delivers one, not continuously at vsync.
 */
public class CameraGLSurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener, Camera2Interface.PreviewCallback {

    // OES (external) texture id the camera feeds frames into; 0 when not created.
    private int mOESTextureId;

    // Receives camera frames on the OES texture; created on the GL thread.
    private SurfaceTexture mSurfaceTexture;
    private DirectDrawer mDirectDrawer;

    // Reused every frame to avoid allocating a 16-float array per draw.
    private final float[] mTransformMatrix = new float[16];

    public CameraGLSurfaceView(Context context) {
        this(context, null);
    }

    public CameraGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);

        setEGLContextClientVersion(2);
        setRenderer(this);
        // RENDERMODE_WHEN_DIRTY: only render when requestRender() is called
        // (i.e. when a camera frame arrives) instead of continuously. The camera
        // drives the frame rate (~30 fps), so dirty mode avoids wasted redraws.
        setRenderMode(RENDERMODE_WHEN_DIRTY);
        setPreserveEGLContextOnPause(true);

        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // NOTE(review): assumes this view is always created with an Activity
            // context; a non-Activity context (e.g. a themed wrapper) would throw
            // ClassCastException here — confirm against all inflation sites.
            Camera2Interface.init((Activity) context);
        }
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Create the OES texture the camera will render into.
        mOESTextureId = createOESTextureObject();
        // Bind a SurfaceTexture to that texture id so camera frames land on it.
        mSurfaceTexture = new SurfaceTexture(mOESTextureId);
        // Request a GL render pass each time the SurfaceTexture receives a frame.
        mSurfaceTexture.setOnFrameAvailableListener(this);
        mDirectDrawer = new DirectDrawer(mOESTextureId);
        // Open the camera and start feeding frames into the SurfaceTexture.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            Camera2Interface.getInstance()
                    .setPreviewCallback(this)
                    .openCamera(mSurfaceTexture);
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        // Match the GL viewport to the new surface size.
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // The GL thread can still deliver a draw after onDetachedFromWindow has
        // torn these down — snapshot the fields and skip the frame if so.
        final SurfaceTexture surfaceTexture = mSurfaceTexture;
        final DirectDrawer drawer = mDirectDrawer;
        if (surfaceTexture == null || drawer == null) {
            return;
        }
        GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // Latch the newest camera frame into the OES texture.
        surfaceTexture.updateTexImage();
        // Fetch the per-frame texture transform and hand it to the drawer.
        surfaceTexture.getTransformMatrix(mTransformMatrix);
        drawer.draw(mTransformMatrix);
    }

    /**
     * Because the render mode is RENDERMODE_WHEN_DIRTY, something must tell the
     * GLSurfaceView when to render (i.e. when to enter onDrawFrame()). That is
     * exactly what SurfaceTexture.OnFrameAvailableListener does: it fires when a
     * new camera frame arrives, and we respond by calling requestRender().
     *
     * @param surfaceTexture the SurfaceTexture that received a new frame
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        requestRender();
    }

    @Override
    public void onResume() {
        super.onResume();
        // Restart the camera preview alongside the GL thread.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            Camera2Interface.getInstance().startPreview();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        // Stop the camera preview while the GL thread is paused.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            Camera2Interface.getInstance().stopPreview();
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            Camera2Interface.getInstance().closeCamera();
        }
        if (mSurfaceTexture != null) {
            // Stop frame callbacks and free the SurfaceTexture's native buffers;
            // previously these were only nulled out, leaking the native side.
            mSurfaceTexture.setOnFrameAvailableListener(null);
            mSurfaceTexture.release();
        }
        mOESTextureId = 0;
        mSurfaceTexture = null;
        mDirectDrawer = null;
    }

    /**
     * Creates an OES (GL_TEXTURE_EXTERNAL_OES) texture configured for camera
     * preview: nearest minification, linear magnification, clamped wrapping.
     *
     * <p>Must be called on a thread with a current GL context.
     *
     * @return the generated texture id
     */
    public static int createOESTextureObject() {
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        // Unbind so later GL state changes don't accidentally touch this texture.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        return tex[0];
    }

    @Override
    public void onCameraBrightnessChanged(boolean isDark) {
        // Intentionally empty: this view does not react to brightness changes.
    }

    @Override
    public void onOpenCameraError() {
        // Intentionally empty: camera-open failures are silently ignored here.
    }

    /**
     * A single preview frame will be returned to the handler supplied. The data will arrive as byte[]
     * in the message.obj field, with width and height encoded as message.arg1 and message.arg2,
     * respectively.
     *
     * @param handler The handler to send the message to.
     * @param message The what field of the message to be sent.
     */
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public void requestPreviewFrame(Handler handler, int message) {
        Camera2Interface.getInstance().requestPreviewFrame(handler, message);
    }
}
