package com.renfei.multimediatask.ui.task11.preview;

import android.Manifest;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;

import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import androidx.core.app.ActivityCompat;

import com.renfei.multimediatask.R;
import com.renfei.multimediatask.base.BaseActivity;
import com.renfei.multimediatask.camera.Camera2Utils;
import com.renfei.multimediatask.interf.OnSurfaceCreateLinstener;
import com.renfei.multimediatask.ui.task3.PreviewCamera2Activity;
import com.renfei.multimediatask.util.OpenGlUtils;

import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.Arrays;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;

/**
 * Camera preview built with camera2 + OpenGL ES: the camera feed is rendered
 * into a GLSurfaceView through an OES external texture.
 */
public class OpenglCamera2PreviewActivity extends BaseActivity {
    private static final String TAG = "OpenglCamera";


    private static final String TAG_PREVIEW = "预览";
    @BindView(R.id.glSurfaceView)
    GLSurfaceView glSurfaceView;


    //摄像头ID
    private String mCameraId;
    //预览尺寸
    private Size mPreviewSize;
    //摄像头设备
    private CameraDevice mCameraDevice;

    //预览请求
    private CaptureRequest.Builder mPreviewRequestBuilder;

    private CameraCaptureSession mCaptureSession;
    private CaptureRequest mPreviewRequest;


    /**
     * 摄像头线程操作
     */
    private HandlerThread mCameraThread;
    private Handler mCameraHandler;


    //预览 宽高
    private int previewWidth = 1280;
    private int previewHeight = 720;

    //渲染
    private CameraPreviewRender cameraPreviewRender;

    //渲染表面
    private Surface mPreviewSurface;
    private SurfaceTexture surfaceTexture;

    @Override
    protected int getLayoutId() {
        return R.layout.activity_opengl_camera2_preview;
    }


    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    @Override
    protected void initListener() {

        glSurfaceView.setEGLContextClientVersion(2);
        cameraPreviewRender = new OpenglCamera2PreviewActivity.CameraPreviewRender(new WeakReference<>(this));
        cameraPreviewRender.setOnSurfaceCreateLinstener(new OnSurfaceCreateLinstener() {
            @Override
            public void onSurfaceCreate(SurfaceTexture texture) {
                /**
                 * 将此surfaceTexture  设置为camera2 的预览表面
                 */
                surfaceTexture = texture;
                startPreview();
            }
        });
        glSurfaceView.setRenderer(cameraPreviewRender);
        glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);

        initCamera(previewWidth, previewHeight);
        openCamera();
    }


    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void initCamera(int width, int height) {
        CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        try {
            for (String cameraId : manager.getCameraIdList()) {
                //获取指定相机的参数配置
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                //默认打开后置摄像头
                if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT)
                    continue;
                //获取StreamConfigurationMap，它是管理摄像头支持的所有输出格式和尺寸
                StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                //根据TextureView的尺寸设置预览尺寸
                mPreviewSize = Camera2Utils.getOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height);
                mCameraId = cameraId;
                break;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void openCamera() {
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
            CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            try {
                manager.openCamera(mCameraId, new CameraDevice.StateCallback() {
                    @Override
                    public void onOpened(@NonNull CameraDevice camera) {
                        mCameraDevice = camera;
                    }

                    @Override
                    public void onDisconnected(@NonNull CameraDevice camera) {
                        camera.close();
                        mCameraDevice = null;
                    }

                    @Override
                    public void onError(@NonNull CameraDevice camera, int error) {
                        camera.close();
                        mCameraDevice = null;
                    }
                }, mCameraHandler);
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }
    }


    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void startPreview() {
        //这里暂时不给surfaceTexture  设置缓冲区大小  (不设置缓冲区大小时， 预览的图像有些失真)
        surfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(),mPreviewSize.getHeight());
        mPreviewSurface = new Surface(surfaceTexture);

        try {
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            //设置预览的显示界面
            mPreviewRequestBuilder.addTarget(mPreviewSurface);
            mCameraDevice.createCaptureSession(Arrays.asList(mPreviewSurface),
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(CameraCaptureSession session) {
                            mCaptureSession = session;
                            repeatPreview();
                        }

                        @Override
                        public void onConfigureFailed(CameraCaptureSession session) {

                        }
                    }, mCameraHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void repeatPreview() {
        mPreviewRequestBuilder.setTag(TAG_PREVIEW);
        mPreviewRequest = mPreviewRequestBuilder.build();
        //设置反复捕获数据的请求，这样预览界面就会一直有数据显示
        try {
            mCaptureSession.setRepeatingRequest(mPreviewRequest, new CameraCaptureSession.CaptureCallback() {
            }, mCameraHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }


    @Override
    protected void onResume() {
        super.onResume();
        startCameraThread();
    }

    @Override
    protected void onPause() {
        closeCamera();
        stopBackgroundThread();
        super.onPause();
    }

    private void stopBackgroundThread() {
        mCameraThread.quitSafely();
        try {
            mCameraThread.join();
            mCameraThread = null;
            mCameraHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }


    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private void closeCamera() {
        if (null != mCaptureSession) {
            mCaptureSession.close();
            mCaptureSession = null;
        }
        if (null != mCameraDevice) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
    }

    private void startCameraThread() {
        mCameraThread = new HandlerThread("CameraThread");
        mCameraThread.start();
        mCameraHandler = new Handler(mCameraThread.getLooper());
    }

    static class CameraPreviewRender implements GLSurfaceView.Renderer {
        private static final int FLOAT_SIZE = 4;//一个float数据，占几个字节 一个字节有8位
        private static final int POINT_VEC = 2;//坐标有两个向量 在平面中 只有x,y
        private static int STRIDE = 0;
        private WeakReference<OpenglCamera2PreviewActivity> weakReference;


        private float[] vertexData = new float[]{
                -1.0f, -1.0f,
                1.0f, -1.0f,
                -1.0f, 1.0f,
                1.0f, 1.0f
        };

        private float[] textureData = new float[]{
                0.0f, 1.0f,
                1.0f, 1.0f,
                0.0f, 0.0f,
                1.0f, 0.0f

        };


        private FloatBuffer vertexBuffer;
        private FloatBuffer textureBuffer;

        private int program;
        private int v_Position;//attribute vec4
        private int f_Position;//attribute vec2

        private int vboId;

        private int cameraTextureId;


        private SurfaceTexture surfaceTexture;
        private OnSurfaceCreateLinstener onSurfaceCreateLinstener;

        public CameraPreviewRender(WeakReference<OpenglCamera2PreviewActivity> weakReference) {
            Log.e(TAG, "CameraPreviewRender: 构造函数");
            this.weakReference = weakReference;

            STRIDE = POINT_VEC * FLOAT_SIZE;

            vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                    .order(ByteOrder.nativeOrder())
                    .asFloatBuffer();
            vertexBuffer.put(vertexData).position(0);

            textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
                    .order(ByteOrder.nativeOrder())
                    .asFloatBuffer();
            textureBuffer.put(textureData).position(0);
        }

        public void setOnSurfaceCreateLinstener(OnSurfaceCreateLinstener onSurfaceCreateLinstener) {
            this.onSurfaceCreateLinstener = onSurfaceCreateLinstener;
        }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            Log.e(TAG, "CameraPreviewRender: onSurfaceCreated");

            String vertexShaderSource = OpenGlUtils.readShaderFromRawResource(weakReference.get(), R.raw.camera_preview_vertex_shader);
            String fragmentShaderSource = OpenGlUtils.readShaderFromRawResource(weakReference.get(), R.raw.camera_preview_fragment_shader);


            program = OpenGlUtils.createProgram(vertexShaderSource, fragmentShaderSource);


            v_Position = GLES20.glGetAttribLocation(program, "v_Position");
            OpenGlUtils.checkLocation(v_Position, "v_Position");
            f_Position = GLES20.glGetAttribLocation(program, "f_Position");
            OpenGlUtils.checkLocation(f_Position, "f_Position");


            //使用VBO
            createVbo();
            //创建摄像头纹理ID

            createCameraTextureID();
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            GLES20.glViewport(0, 0, width, height);
            Log.e(TAG, "CameraPreviewRender: onSurfaceChanged");
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            Log.e(TAG, "CameraPreviewRender: onDrawFrame");

            surfaceTexture.updateTexImage();//调用才会刷新

            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            GLES20.glClearColor(0.1f, 0.4f, 0.7f, 1.0f);

            GLES20.glUseProgram(program);

            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);

            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureId);
            GLES20.glEnableVertexAttribArray(v_Position);
            GLES20.glVertexAttribPointer(v_Position, POINT_VEC, GLES20.GL_FLOAT, false, STRIDE, 0);
            GLES20.glEnableVertexAttribArray(f_Position);
            GLES20.glVertexAttribPointer(f_Position, POINT_VEC, GLES20.GL_FLOAT, false, STRIDE, vertexData.length * FLOAT_SIZE);
            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
        }


        private void createCameraTextureID() {
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            cameraTextureId = textures[0];

            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTextureId);

            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);


            surfaceTexture = new SurfaceTexture(cameraTextureId);
            if (null != onSurfaceCreateLinstener) {
                onSurfaceCreateLinstener.onSurfaceCreate(surfaceTexture);
            }

        }


        //VBO
        private void createVbo() {
            int[] vboArr = new int[1];
            GLES20.glGenBuffers(1, vboArr, 0);
            vboId = vboArr[0];

            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
            GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, (vertexData.length + textureData.length) * FLOAT_SIZE,
                    null, GLES20.GL_STATIC_DRAW);

            GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * FLOAT_SIZE, vertexBuffer);
            GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * FLOAT_SIZE, textureData.length * FLOAT_SIZE, textureBuffer);
            GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
        }

    }
}
