package com.example.ar_10.myobj;

import android.graphics.PixelFormat;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;

import com.example.ar_10.base.MatrixState;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * Renderer for the AR scene: configures the GL surface, builds the
 * projection matrix from camera intrinsics, and drives redraws from a
 * camera-pose polling thread.
 *
 * @author xuebb
 */
public class PipeRenderer {

    private static final String TAG = "PipeRenderer";

    // NOTE(review): static mutable state is shared across all PipeRenderer
    // instances; it is required by the static helpers below, but confirm the
    // app only ever creates one PipeRenderer at a time.
    private static float[] xyzCamera;
    private static MatrixState matrixState;
    private static final PositionData positionData = new PositionData();

    /** Sprite/model group holding the loaded obj+mtl entities. */
    private PipeGroup mSpriteGroup = null;

    /** Camera-pose polling thread; replaced (old one stopped) on each surface change. */
    private ChangeCameraThread mCameraThread = null;

    /**
     * The GL surface this renderer draws into.
     */
    public PlaneGlSurfaceView mGLSurfaceView;

    /**
     * Wires this renderer into the given surface view: RGBA8888 + 16-bit depth
     * EGL config, translucent surface (so the camera preview behind it shows
     * through), on-demand render mode.
     *
     * @param glSurfaceView surface to render into
     * @param renderer      optional external callback sink; currently unused —
     *                      the delegating calls below are commented out
     */
    public PipeRenderer(PlaneGlSurfaceView glSurfaceView, Renderer renderer) {

        this.mGLSurfaceView = glSurfaceView;

        // Create collaborators BEFORE installing the GL renderer so the
        // GL-thread callbacks can never observe them as null. (Previously these
        // were assigned after setRenderer() and were only safe by timing.)
        matrixState = new MatrixState();
        // Initialize the obj+mtl model group.
        mSpriteGroup = new PipeGroup(glSurfaceView);

        glSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        glSurfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
        glSurfaceView.setRenderer(new GLSurfaceView.Renderer() {
            @Override
            public void onSurfaceCreated(GL10 gl, EGLConfig config) {
                // Runs on the GL thread.
                GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0f);
                // Enable alpha blending. Use the GLES20 static API consistently
                // instead of mixing in the legacy GL10 wrapper (same GL state,
                // identical constants).
                GLES20.glEnable(GLES20.GL_BLEND);
                GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
                // Depth testing for correct occlusion.
                GLES20.glEnable(GLES20.GL_DEPTH_TEST);
                // Back-face culling.
                GLES20.glEnable(GLES20.GL_CULL_FACE);
                // Initialize the transform-matrix stack.
                matrixState.setInitStack();

                // View (camera) matrix from the configured eye/center positions.
                matrixState.setCamera(PositionData.EYE_X, PositionData.EYE_Y, PositionData.EYE_Z,
                        PositionData.VIEW_CENTER_X, PositionData.VIEW_CENTER_Y, PositionData.VIEW_CENTER_Z,
                        0f, 1f, 0f);

                // Far-away light so illumination is effectively directional.
                matrixState.setLightLocation(10000000, 10000000, 10000000);

                // Load the obj model(s).
                initUI();

//                renderer.onSurfaceCreated(PipeRenderer.this);

            }

            @Override
            public void onSurfaceChanged(GL10 gl, int width, int height) {
                // width/height are the on-screen dimensions of the GL window;
                // map the full surface to the viewport (e.g. 1080 x 2137).
                GLES20.glViewport(0, 0, width, height);

                calculateProjecttionMatrix(width, height);

                // Restart the camera polling thread. Stop any previous instance
                // first so repeated surface changes (rotation, resume) do not
                // leak a thread per change.
                if (mCameraThread != null) {
                    mCameraThread.flag = false;
                    mCameraThread.interrupt();
                }
                mCameraThread = new ChangeCameraThread();
                mCameraThread.start();
//                renderer.onSurfaceChanged(PipeRenderer.this, width, height);

            }

            @Override
            public void onDrawFrame(GL10 gl) {
                // Runs on the GL thread: clear depth and color buffers, then
                // draw the scene under a pushed matrix so per-frame transforms
                // do not accumulate. (Removed the per-frame Log.d — it spammed
                // logcat on every frame.)
                GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

                matrixState.pushMatrix();
                mSpriteGroup.onDraw(matrixState);
                matrixState.popMatrix();
//                renderer.onDrawFrame(PipeRenderer.this);

            }
        });
        // Only redraw when requestRender() is called (the camera thread drives this).
        glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    /**
     * Initializes the sprite entities in the scene (called on the GL thread
     * from onSurfaceCreated).
     */
    private void initUI() {
        mSpriteGroup.initObjs();
    }


    /**
     * Camera-update thread: polls the tracked camera pose every ~20 ms,
     * refreshes the view matrix, and requests a redraw.
     */
    public class ChangeCameraThread extends Thread {
        // volatile: written from another thread to request shutdown; without it
        // the loop may never observe the change.
        public volatile boolean flag = true;

        @Override
        public void run() {
            while (flag) {

                xyzCamera = PositionData.getXyzCamera();
                // Refresh the view matrix from the latest pose.
                float[] R = World2CameraMatrix();
                matrixState.setmVMatrix(R);
                mGLSurfaceView.requestRender(); // trigger a redraw
                try {
                    Thread.sleep(20);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and exit: interruption is the
                    // shutdown signal, not an error to swallow.
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }


    /**
     * Computes the Euclidean distance between the model and the camera.
     *
     * @return distance from the current camera position to the model position
     */
    public static float getDistance_V4M() {
        // Guard: the camera thread may not have populated xyzCamera yet; fetch
        // the current pose directly rather than NPE-ing.
        if (xyzCamera == null) {
            xyzCamera = PositionData.getXyzCamera();
        }
        float[] xyzModel = PositionData.getXyzModel();
        float dx = xyzCamera[0] - xyzModel[0];
        float dy = xyzCamera[1] - xyzModel[1];
        float dz = xyzCamera[2] - xyzModel[2];
        return (float) Math.sqrt(dx * dx + dy * dy + dz * dz);
    }

    /**
     * Camera extrinsics: returns the current world-to-camera (view) matrix.
     *
     * @return 4x4 view matrix in column-major float[16] form, as supplied by
     *         {@link PositionData#getMvMatrix()}
     */
    public static float[] World2CameraMatrix() {
        return positionData.getMvMatrix();
    }

    /**
     * Builds the projection matrix from the camera intrinsics and writes it
     * into the shared {@link MatrixState}. (Method name keeps the historical
     * "Projecttion" spelling for caller compatibility.)
     *
     * <pre>
     *                   - Fx 0 Cx -
     * K (intrinsics) = | 0 Fy Cy |
     *                   - 0  0  1 -
     *
     *                          -  2*Fx/w     0      1-(2*Cx/w)        0         -
     * ProjectFrustum (4x4) =  |     0     2*Fy/h   (2*Cy/h)-1         0          |
     *                         |     0       0    -(f+n)/(f-n)  -(2f*n)/(f-n)    |
     *                          -    0       0         -1              0        -
     * </pre>
     *
     * Reference: https://blog.csdn.net/qq_33446100/article/details/96845829
     *
     * @param width  display width in pixels
     * @param height display height in pixels
     */
    public void calculateProjecttionMatrix(float width, float height) {
        float near_plane = positionData.PROJECTION_NEAR;
        float far_plane = positionData.PROJECTION_FAR;
        // Camera sensor resolution 4000 x 1824; principal point assumed at the
        // sensor center. NOTE(review): hard-coded for one device — confirm.
        float CameraResolutionX = 1824;
        float CameraResolutionY = 4000;
        float Cx = CameraResolutionX / 2.0f;
        float Cy = CameraResolutionY / 2.0f;
        // Focal lengths in pixels (from calibration).
        float Fx = 3402.0081020896441f;
        float Fy = 3402.0081020896441f;

        // Fill the shared projection matrix in place (column-major float[16]).
        float[] projection_matrix = matrixState.getmProjMatrix();

        projection_matrix[0] = 2 * Fx / width;
        projection_matrix[1] = 0.0f;
        projection_matrix[2] = 0.0f;
        projection_matrix[3] = 0.0f;

        projection_matrix[4] = 0.0f;
        projection_matrix[5] = 2 * Fy / height;
        projection_matrix[6] = 0.0f;
        projection_matrix[7] = 0.0f;

        projection_matrix[8] = 1.0f - 2 * Cx / width;
        projection_matrix[9] = 2 * Cy / height - 1.0f;
        projection_matrix[10] = -(far_plane + near_plane) / (far_plane - near_plane);
        projection_matrix[11] = -1.0f;

        projection_matrix[12] = 0.0f;
        projection_matrix[13] = 0.0f;
        projection_matrix[14] = -2.0f * far_plane * near_plane / (far_plane - near_plane);
        projection_matrix[15] = 0.0f;
    }

    /**
     * Interface to be implemented for rendering callbacks.
     */
    public interface Renderer {
        /**
         * Called by {@link PipeRenderer} when the GL render surface is created.
         *
         * <p>See {@link GLSurfaceView.Renderer#onSurfaceCreated}.
         */
        void onSurfaceCreated(PipeRenderer render);

        /**
         * Called by {@link PipeRenderer} when the GL render surface dimensions are changed.
         *
         * <p>See {@link GLSurfaceView.Renderer#onSurfaceChanged}.
         */
        void onSurfaceChanged(PipeRenderer render, int width, int height);

        /**
         * Called by {@link PipeRenderer} when a GL frame is to be rendered.
         *
         * <p>See {@link GLSurfaceView.Renderer#onDrawFrame}.
         */
        void onDrawFrame(PipeRenderer render);
    }
}
