package com.dream.libxrec.view;

import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class YUVSurfaceViewRender implements GLSurfaceView.Renderer {

    private final String TAG = "YUVSurfaceViewRender";

    // Latest raw camera frame (Y plane followed by interleaved UV/VU plane).
    // Written by the camera thread via onFrame() and read on the GL thread in
    // onDrawFrame(); volatile guarantees the GL thread sees the newest reference.
    // NOTE(review): the array is held by reference, not copied — the caller must
    // not recycle the buffer while a frame may still be rendered.
    private volatile byte[] mYUVData;
    private ByteBuffer yBuffer;   // Y plane staging buffer, width*height bytes
    private ByteBuffer uvBuffer;  // interleaved UV plane staging buffer, width*height/2 bytes

    private int yTextureHandle;   // uniform location of y_texture
    private int uvTextureHandle;  // uniform location of uv_texture
    private int[] yTextureNames;  // GL texture object for the Y plane
    private int[] uvTextureNames; // GL texture object for the UV plane

    private FloatBuffer vertices; // interleaved [x,y, s,t] quad vertices
    private ShortBuffer indices;  // two triangles covering the quad

    private int mProgram;
    // When true, activeProgram() is re-run on the next drawn frame. Nothing in
    // this class currently sets it to true, so it only guards against stale
    // handles after a surface re-creation.
    private boolean mBoolean = false;

    private int positionHandle;  // attribute location of a_position
    private int texCoordHandle;  // attribute location of a_texCoord

    private int mImageWidth = 0;
    private int mImageHeight = 0;

    // FIX: these fields used to be uninitialized, so both defaulted to 0 and
    // NV21 was indistinguishable from NV12 — the NV21 shader branch could never
    // be selected. They must hold distinct values (and are now constants).
    public static final int RENDER_TYPE_NV21 = 0;
    public static final int RENDER_TYPE_NV12 = 1;

    private int mRenderType = RENDER_TYPE_NV12; // default render type is NV12

    public YUVSurfaceViewRender(){}

    /**
     * Prepares the vertex/index buffers and the YUV staging buffers.
     * Must be called before frames are rendered.
     *
     * @param renderType pixel layout of incoming frames: {@link #RENDER_TYPE_NV21}
     *                   or {@link #RENDER_TYPE_NV12}; any other value keeps the
     *                   current type (NV12 by default)
     * @param width  width of the YUV image in pixels
     * @param height height of the YUV image in pixels
     */
    public void init(int renderType, int width, int height){
        if (renderType == RENDER_TYPE_NV12 || renderType == RENDER_TYPE_NV21){
            this.mRenderType = renderType;
        }

        this.mImageHeight = height;
        this.mImageWidth = width;

        //The vertices and indices of our mesh that we will draw the camera preview image on
        float[] verticesData = {
                -1.f, 1.f, // Position 0
                0.0f, 0.0f, // TexCoord 0
                -1.f, -1.f, // Position 1
                0.0f, 1.0f, // TexCoord 1
                1.f, -1.f, // Position 2
                1.0f, 1.0f, // TexCoord 2
                1.f, 1.f, // Position 3
                1.0f, 0.0f // TexCoord 3
        };
        vertices = ByteBuffer.allocateDirect(verticesData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
        vertices.put(verticesData);
        vertices.position(0);

        short[] indicesData = {0, 1, 2, 0, 2, 3};
        indices = ByteBuffer.allocateDirect(indicesData.length * 2).order(ByteOrder.nativeOrder()).asShortBuffer();
        indices.put(indicesData);
        indices.position(0);

        yBuffer = ByteBuffer.allocateDirect(mImageWidth * mImageHeight).order(ByteOrder.nativeOrder());
        //We have (width/2 * height/2) chroma samples, each sample is 2 bytes (U+V)
        uvBuffer = ByteBuffer.allocateDirect(mImageWidth * mImageHeight / 2).order(ByteOrder.nativeOrder());
    }

    /**
     * Publishes a new camera frame to be drawn on the next {@link #onDrawFrame}.
     * The array is stored by reference; it must contain at least
     * width*height*3/2 bytes in the layout chosen in {@link #init}.
     *
     * @param data the NV12/NV21 frame
     */
    public void onFrame(byte[] data){
        this.mYUVData = data;
    }

    /**
     * Draws the image to the background.
     *
     * @param yuv The YUV-NV21 or NV12 image to be drawn
     */
    private void renderBackground(byte[] yuv){
        final int ySize = mImageWidth * mImageHeight;
        final int uvSize = ySize / 2;

        // FIX: guard against frames smaller than expected and against being
        // called before init(); previously a short frame threw
        // IndexOutOfBoundsException/BufferOverflowException on the GL thread.
        if (yBuffer == null || uvBuffer == null || yuv == null || yuv.length < ySize + uvSize) {
            Log.e(TAG, "renderBackground: buffer not initialized or frame too small");
            return;
        }

        //Copy the Y channel of the image into its buffer, the first (width*height) bytes are the Y channel
        yBuffer.clear();
        yBuffer.put(yuv, 0, ySize);
        yBuffer.position(0);

        //Copy the UV channels of the image into their buffer, the following (width*height/2) bytes are the UV channel; the U and V bytes are interspread
        uvBuffer.clear();
        uvBuffer.put(yuv, ySize, uvSize);
        uvBuffer.position(0);

        //Load the vertex position (stride is 4 floats: x,y,s,t)
        vertices.position(0);
        GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 4 * 4, vertices);

        //Load the texture coordinate (offset 2 floats into the same interleaved buffer)
        vertices.position(2);
        GLES20.glVertexAttribPointer(texCoordHandle, 2, GLES20.GL_FLOAT, false, 4 * 4, vertices);

        //Load our vertex array into the shader
        GLES20.glEnableVertexAttribArray(positionHandle);
        GLES20.glEnableVertexAttribArray(texCoordHandle);

        //Set texture slot 0 as active and bind our texture object to it
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTextureNames[0]);

        //Y texture is (width*height) in size and each pixel is one byte; by setting GL_LUMINANCE, OpenGL puts this byte into R,G and B components of the texture
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mImageWidth, mImageHeight,
                0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yBuffer);

        //Use linear interpolation when magnifying/minifying the texture to areas larger/smaller than the texture size
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        //Set the uniform y_texture object in the shader code to the texture at slot 0
        GLES20.glUniform1i(yTextureHandle, 0);

        /*
         * Load the UV texture
         */

        //Set texture slot 1 as active and bind our texture object to it
        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uvTextureNames[0]);

        //UV texture is (width/2*height/2) in size (downsampled by 2 in both dimensions, each pixel corresponds to 4 pixels of the Y channel)
        //and each pixel is two bytes. By setting GL_LUMINANCE_ALPHA, OpenGL puts the first byte into the R,G and B components
        //and the second byte into the A component of the texture. That's why the shader reads the two chroma channels from .r and .a.
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, mImageWidth / 2, mImageHeight / 2,
                0, GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, uvBuffer);

        //Use linear interpolation when magnifying/minifying the texture to areas larger/smaller than the texture size
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        //Set the uniform uv_texture object in the shader code to the texture at slot 1
        GLES20.glUniform1i(uvTextureHandle, 1);

        //Draw the two triangles of the full-screen quad
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_SHORT, indices);

        GLES20.glDisableVertexAttribArray(positionHandle);
        GLES20.glDisableVertexAttribArray(texCoordHandle);
    }

    /**
     * Compiles the given shader code and returns its shader handle.
     *
     * @param type The type of the shader (GL_VERTEX_SHADER or GL_FRAGMENT_SHADER)
     * @param shaderCode The GLSL source code of the shader
     * @return The handle of the compiled shader, or 0 on failure
     */
    private int loadShader(int type, String shaderCode) {
        //Create the shader object
        int[] compiled = new int[1];
        int shader = GLES20.glCreateShader(type);
        if (shader == 0)
            return 0;

        //Load the shader source
        GLES20.glShaderSource(shader, shaderCode);

        //Compile the shader
        GLES20.glCompileShader(shader);

        //Check the compile status
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);

        if (compiled[0] == 0){
            Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }

    /**
     * Compiles and links the vertex/fragment shaders into {@link #mProgram}.
     * Must be called on the GL thread with a current context.
     */
    private void createProgram() {

        int[] linked = new int[1];

        String vertexShaderCode = "attribute vec4 a_position;                             \n" +
                "attribute vec2 a_texCoord;                             \n" +
                "varying vec2 v_texCoord;                               \n" +
                "void main(){                                           \n" +
                "  gl_Position = a_position;                            \n" +
                "  v_texCoord = a_texCoord;                             \n" +
                "}                                                      \n";
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);

        if (vertexShader == 0)
            return;

        //NV12 and NV21 only differ in the byte order of the interleaved chroma
        //plane (UV vs VU), so swapping which texture channel feeds yuv.y / yuv.z
        //is enough to support both formats with one shader.
        String yuv_zy = mRenderType == RENDER_TYPE_NV12?
                "   yuv.z = texture2D(uv_texture, v_texCoord).a-0.5; \n" +
                "   yuv.y = texture2D(uv_texture, v_texCoord).r - 0.5; \n"
                :
                "   yuv.z = texture2D(uv_texture,v_texCoord).r-0.5; \n" +
                "   yuv.y = texture2D(uv_texture, v_texCoord).a - 0.5; \n"
                ;

        //convertMat is a column-major BT.601-style YUV -> RGB matrix.
        String fragmentShaderCode = "#ifdef GL_ES                                   \n" +
                "precision highp float;                         \n" +
                "#endif                                         \n" +
                "varying vec2 v_texCoord;                       \n" +
                "uniform sampler2D y_texture;                   \n" +
                "uniform sampler2D uv_texture;                  \n" +
                "const float PI = 3.14159265;                   \n" +
                "const mat3 convertMat = mat3(1.0, 1.0, 1.0, 0.0, -0.39456, 2.03211, 1.13983, -0.58060, 0.0); \n" +
                "void main(){                                   \n" +
                "   vec3 yuv;                                   \n" +
                "   yuv.x = texture2D(y_texture, v_texCoord).r; \n" +
                yuv_zy +
                "   vec3 color = convertMat * yuv;              \n" +
                "   vec4 mainColor = vec4(color, 1.0);          \n" +
                "   gl_FragColor = mainColor;                   \n" +
                "}                                              \n";
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
        if (fragmentShader == 0){
            GLES20.glDeleteShader(vertexShader);
            return;
        }

        //Create the program object
        mProgram = GLES20.glCreateProgram();
        if (mProgram == 0)
            return;

        //Attach the vertex shader to the program
        GLES20.glAttachShader(mProgram, vertexShader);

        //Attach the fragment shader to the program
        GLES20.glAttachShader(mProgram, fragmentShader);

        //Link the program
        GLES20.glLinkProgram(mProgram);

        GLES20.glGetProgramiv(mProgram, GLES20.GL_LINK_STATUS, linked, 0);
        if (linked[0] == 0){
            Log.e(TAG, "Error linking program:");
            Log.e(TAG, GLES20.glGetProgramInfoLog(mProgram));
            GLES20.glDeleteProgram(mProgram);
            return;
        }
        //Free up no longer needed shader resources
        GLES20.glDeleteShader(vertexShader);
        GLES20.glDeleteShader(fragmentShader);
    }

    /**
     * Installs {@link #mProgram} into the ES pipeline and caches attribute and
     * uniform locations, then generates the Y and UV texture objects.
     */
    private void activeProgram() {
        //Install the program into the OpenGL ES rendering state
        GLES20.glUseProgram(mProgram);

        //Get the attribute locations
        positionHandle = GLES20.glGetAttribLocation(mProgram, "a_position");
        texCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");

        //Create the Y texture object.
        //FIX: removed glEnable(GL_TEXTURE_2D) — it is not a valid capability in
        //OpenGL ES 2.0 (texturing is driven by the shaders) and raised GL_INVALID_ENUM.
        yTextureHandle = GLES20.glGetUniformLocation(mProgram, "y_texture");
        yTextureNames = new int[1];
        GLES20.glGenTextures(1, yTextureNames, 0);

        //Create the UV texture object
        uvTextureHandle = GLES20.glGetUniformLocation(mProgram, "uv_texture");
        uvTextureNames = new int[1];
        GLES20.glGenTextures(1, uvTextureNames, 0);

        //Clear the screen to opaque black
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        createProgram();
        activeProgram();
        mBoolean = false;
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        //FIX: removed glActiveTexture(GL_ACTIVE_TEXTURE) — GL_ACTIVE_TEXTURE is
        //a glGet* query token, not a texture unit, so the call raised
        //GL_INVALID_ENUM on every surface change. renderBackground() selects the
        //texture units it needs explicitly.
        GLES20.glViewport(0, 0, width, height);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        //Re-acquire program state if a refresh was requested
        if (mBoolean){
            activeProgram();
            mBoolean = false;
        }

        //Draw the most recently published frame, if any
        byte[] frame = mYUVData;
        if (frame != null){
            renderBackground(frame);
        }
    }
}