package com.renfei.multimediatask.ui.task11.preview;

import android.app.Activity;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.WindowManager;

import androidx.appcompat.app.AppCompatActivity;

import com.renfei.multimediatask.R;
import com.renfei.multimediatask.base.BaseActivity;
import com.renfei.multimediatask.interf.OnSurfaceCreateLinstener;
import com.renfei.multimediatask.util.CameraUtils;
import com.renfei.multimediatask.util.OpenGlUtils;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import butterknife.BindView;
import butterknife.ButterKnife;

import static com.renfei.multimediatask.util.CameraUtils.*;

/**
 * Previews Camera1 frames through an OpenGL ES 2.0 {@link GLSurfaceView} by
 * streaming them into an OES external texture.
 * <p>
 * At this stage the previewed frames still appear rotated; a later task
 * corrects the orientation with a texture transform matrix.
 */
public class OpenglCameraPreviewActivity extends BaseActivity implements SurfaceTexture.OnFrameAvailableListener {

    private static final String TAG = "OpenglCamera";
    @BindView(R.id.glSurfaceView)
    GLSurfaceView glSurfaceView;

    // Camera1 handle: opened in onResume(), released in onPause().
    Camera camera;
    private int cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;

    private CameraPreviewRender cameraPreviewRender;


    @Override
    protected int getLayoutId() {
        return R.layout.activity_opengl_camera_preview;
    }

    @Override
    protected void initListener() {
        glSurfaceView.setEGLContextClientVersion(2);
        cameraPreviewRender = new CameraPreviewRender(new WeakReference<>(this));
        cameraPreviewRender.setOnSurfaceCreateLinstener(new OnSurfaceCreateLinstener() {
            @Override
            public void onSurfaceCreate(SurfaceTexture surfaceTexture) {
                Log.e(TAG, "CameraPreviewRender: onSurfaceCreate");
                surfaceTexture.setOnFrameAvailableListener(OpenglCameraPreviewActivity.this);
                // This callback arrives on the GL thread; the camera is opened on
                // the main thread in onResume(). Guard against the surface being
                // created before (or after) the camera exists.
                if (camera == null) {
                    Log.e(TAG, "onSurfaceCreate: camera not opened yet, skipping preview setup");
                    return;
                }
                try {
                    camera.setPreviewTexture(surfaceTexture);
                    // Must run after setPreviewTexture() and before startPreview().
                    setCameraDisplayOrientation();
                    camera.startPreview();
                    Log.e(TAG, "camera: startPreview");
                } catch (IOException e) {
                    // Preview cannot start without the texture; log with the cause
                    // instead of silently printing the stack trace.
                    Log.e(TAG, "failed to attach preview texture to camera", e);
                }
            }
        });
        glSurfaceView.setRenderer(cameraPreviewRender);
        glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // GLSurfaceView requires its lifecycle to be forwarded so the GL thread
        // resumes rendering.
        glSurfaceView.onResume();
        if (null == camera) {
            openCamera(1280, 720);
        }
    }


    @Override
    protected void onPause() {
        super.onPause();
        // Pause the GL thread before tearing down the camera feeding it.
        glSurfaceView.onPause();
        releaseCamera();
    }


    /**
     * Opens the camera selected by {@code cameraId}, falling back to the
     * system-default camera, and configures a preview size close to
     * {@code width}x{@code height}.
     *
     * @throws RuntimeException if no camera can be opened at all
     */
    private void openCamera(int width, int height) {
        camera = Camera.open(cameraId);

        if (camera == null) {
            // Note: the requested id here is the BACK camera, not the front one.
            Log.e(TAG, "open camera id=" + cameraId + " failed, falling back to default camera");
            camera = Camera.open();    // opens the default (back-facing) camera
        }
        if (camera == null) {
            throw new RuntimeException("打开摄像头失败");
        }
        Camera.Parameters parms = camera.getParameters();
        choosePreviewSize(parms, width, height);

        // setRotation() affects captured JPEGs, not the on-screen preview;
        // preview orientation is handled by setCameraDisplayOrientation().
        parms.setRotation(90);
        camera.setParameters(parms);
    }

    /**
     * Applies the display orientation recommended by the framework for the
     * current camera and screen rotation. Delegates to
     * {@link #getRecommendCameraRotateDegrees(int)} to avoid duplicating the
     * orientation math.
     **/
    public void setCameraDisplayOrientation() {
        camera.setDisplayOrientation(getRecommendCameraRotateDegrees(cameraId));
    }


    /** Stops the preview and releases the camera so other apps can use it. */
    private void releaseCamera() {
        if (null != camera) {
            camera.stopPreview();
            camera.release();
            camera = null;
        }
    }

    /**
     * Official recommended orientation computation (from the
     * {@code Camera.setDisplayOrientation} docs). Reported as not fully
     * effective on some devices (e.g. Samsung Note 8).
     *
     * @param cameraId id of the camera whose orientation to compute
     * @return clockwise rotation in degrees (0/90/180/270) to apply to the preview
     */
    public int getRecommendCameraRotateDegrees(int cameraId) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(cameraId, info);
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        int degrees = 0;

        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
        }
        int result = 0;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360;  // compensate the mirror
        } else {  // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }

        return result;
    }


    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        Log.e(TAG, "onFrameAvailable: ");
    }

    /**
     * Renders the camera's OES external texture onto a full-screen quad.
     * Vertex and texture coordinates are packed into a single VBO
     * (vertices first, then texture coordinates).
     */
    static class CameraPreviewRender implements GLSurfaceView.Renderer {
        private static final int FLOAT_SIZE = 4; // bytes per float
        private static final int POINT_VEC = 2;  // components per 2D coordinate (x, y)
        // Byte stride between consecutive coordinates in each tightly-packed array.
        private static final int STRIDE = POINT_VEC * FLOAT_SIZE;
        // Weak reference: the GL thread must not keep the Activity alive.
        private WeakReference<OpenglCameraPreviewActivity> weakReference;


        // Full-screen quad in NDC, ordered for GL_TRIANGLE_STRIP.
        private float[] vertexData = new float[]{
                -1.0f, -1.0f,
                1.0f, -1.0f,
                -1.0f, 1.0f,
                1.0f, 1.0f
        };

        // Texture coordinates matching the vertex order above.
        private float[] textureData = new float[]{
                0.0f, 1.0f,
                1.0f, 1.0f,
                0.0f, 0.0f,
                1.0f, 0.0f

        };


        private FloatBuffer vertexBuffer;
        private FloatBuffer textureBuffer;

        private int program;
        private int v_Position;//attribute vec4
        private int f_Position;//attribute vec2

        private int vboId;

        private int cameraTextureId;


        private SurfaceTexture surfaceTexture;
        private OnSurfaceCreateLinstener onSurfaceCreateLinstener;

        public CameraPreviewRender(WeakReference<OpenglCameraPreviewActivity> weakReference) {
            Log.e(TAG, "CameraPreviewRender: 构造函数");
            this.weakReference = weakReference;

            vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * FLOAT_SIZE)
                    .order(ByteOrder.nativeOrder())
                    .asFloatBuffer();
            vertexBuffer.put(vertexData).position(0);

            textureBuffer = ByteBuffer.allocateDirect(textureData.length * FLOAT_SIZE)
                    .order(ByteOrder.nativeOrder())
                    .asFloatBuffer();
            textureBuffer.put(textureData).position(0);
        }

        public void setOnSurfaceCreateLinstener(OnSurfaceCreateLinstener onSurfaceCreateLinstener) {
            this.onSurfaceCreateLinstener = onSurfaceCreateLinstener;
        }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            Log.e(TAG, "CameraPreviewRender: onSurfaceCreated");

            // The Activity may already be gone; bail out instead of NPE-ing on
            // the GL thread.
            OpenglCameraPreviewActivity activity = weakReference.get();
            if (activity == null) {
                Log.e(TAG, "onSurfaceCreated: activity released, skipping GL setup");
                return;
            }

            String vertexShaderSource = OpenGlUtils.readShaderFromRawResource(activity, R.raw.camera_preview_vertex_shader);
            String fragmentShaderSource = OpenGlUtils.readShaderFromRawResource(activity, R.raw.camera_preview_fragment_shader);


            program = OpenGlUtils.createProgram(vertexShaderSource, fragmentShaderSource);


            v_Position = GLES20.glGetAttribLocation(program, "v_Position");
            OpenGlUtils.checkLocation(v_Position, "v_Position");
            f_Position = GLES20.glGetAttribLocation(program, "f_Position");
            OpenGlUtils.checkLocation(f_Position, "f_Position");


            // Upload both coordinate arrays into one VBO.
            createVbo();
            // Create the OES texture the camera will stream into, and hand its
            // SurfaceTexture to the Activity.
            createCameraTextureID();
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            GLES20.glViewport(0, 0, width, height);
            Log.e(TAG, "CameraPreviewRender: onSurfaceChanged");
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            // Latch the most recent camera frame into the OES texture.
            surfaceTexture.updateTexImage();

            // Set the clear color BEFORE clearing; the original order applied
            // the color one frame late.
            GLES20.glClearColor(0.1f, 0.4f, 0.7f, 1.0f);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(program);

            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureId);
            GLES20.glEnableVertexAttribArray(v_Position);
            GLES20.glVertexAttribPointer(v_Position, POINT_VEC, GLES20.GL_FLOAT, false, STRIDE, 0);
            GLES20.glEnableVertexAttribArray(f_Position);
            // Texture coordinates start right after the vertex data in the VBO.
            GLES20.glVertexAttribPointer(f_Position, POINT_VEC, GLES20.GL_FLOAT, false, STRIDE, vertexData.length * FLOAT_SIZE);
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
        }


        /**
         * Creates the GL_TEXTURE_EXTERNAL_OES texture, wraps it in a
         * SurfaceTexture, and notifies the listener so the camera preview can
         * be attached to it.
         */
        private void createCameraTextureID() {
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            cameraTextureId = textures[0];

            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureId);

            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);


            surfaceTexture = new SurfaceTexture(cameraTextureId);
            if (null != onSurfaceCreateLinstener) {
                onSurfaceCreateLinstener.onSurfaceCreate(surfaceTexture);
            }

        }


        /**
         * Allocates one VBO sized for both arrays and uploads vertex data at
         * offset 0 followed by texture-coordinate data.
         */
        private void createVbo() {
            int[] vboArr = new int[1];
            GLES20.glGenBuffers(1, vboArr, 0);
            vboId = vboArr[0];

            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
            GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, (vertexData.length + textureData.length) * FLOAT_SIZE,
                    null, GLES20.GL_STATIC_DRAW);

            GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * FLOAT_SIZE, vertexBuffer);
            GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * FLOAT_SIZE, textureData.length * FLOAT_SIZE, textureBuffer);
            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
        }

    }

}
