package com.facepp.demo.video.layer;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.net.Uri;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;

import com.facepp.demo.BitmapFaceDetector;
import com.facepp.demo.FaceApplication;
import com.facepp.demo.R;
import com.facepp.demo.util.GlUtil;
import com.facepp.demo.util.TextResourceReader;
import com.facepp.demo.util.Uv21Helper;
import com.megvii.facepp.sdk.Facepp;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

/**
 * Create by SongChao on 2019/2/13
 */
/**
 * Draw layer that renders a video file onto an external OES texture via
 * {@link MediaPlayer} + {@link SurfaceTexture}, drawn as a full-screen quad.
 *
 * <p>Lifecycle: {@link #onSurfaceCreated(boolean)} prepares the player, the GL
 * texture and the shader program, then starts playback. {@link #drawFrame()} must
 * be called on the GL thread that owns the texture.
 */
public class NormalVideoLayer implements IDrawLayer, SurfaceTexture.OnFrameAvailableListener {

    private static final String TAG = "NormalVideoLayer";

    /** Bytes per {@code short} / {@code float} when sizing direct buffers. */
    private static final int BYTES_PER_SHORT = 2;
    private static final int BYTES_PER_FLOAT = 4;

    private MediaPlayer mMediaPlayer = new MediaPlayer();

    private Uri videoUrl;
    private Context mContext;

    /** External OES texture the MediaPlayer decodes into. */
    private int mTextureId;
    private SurfaceTexture mSurfaceTexture;

    private FloatBuffer mVertexBuffer;
    private ShortBuffer mDrawListBuffer;
    private FloatBuffer mUVTexVertexBuffer;
    private int mProgram = 0;
    private int mPositionHandle = 0;          // attribute location
    private int mTextureCoordinatorHandle = 0; // attribute location
    private int mMVPMatrixHandle = 0;          // uniform location
    private int mTextureHandle = 0;            // uniform location (sampler)

    // Full-screen quad, counter-clockwise from top right.
    private float mVertex[] = {1f, 1f, 0f,    // top right
            -1f, 1f, 0f, // top left
            -1f, -1f, 0f, // bottom left
            1f, -1f, 0f // bottom right
    };

    // Texture coordinates matching the quad vertices above.
    private float mUVTexVertex[] = {1f, 0f, 0f, 0f, 0f, 1f, 1f, 1f};

    // Two triangles covering the quad.
    private short DRAW_ORDER[] = {0, 1, 2, 2, 0, 3};

    private float mMVP[] = new float[16];

    private BitmapFaceDetector faceDetector;
    private int width;
    private int height;

    /**
     * @param context  context used to resolve the data source and shader resources
     * @param videoUrl URI of the video to play
     */
    public NormalVideoLayer(Context context, Uri videoUrl) {
        this.mContext = context;
        this.videoUrl = videoUrl;
    }

    @Override
    public void onSurfaceCreated(boolean isRecord) {
        prepare(isRecord);
        start();
    }

    /**
     * Prepares the media player, creates the OES texture + SurfaceTexture and
     * compiles the shader program. Must run on the GL thread.
     *
     * @param isRecord when {@code true} playback is muted (screen recording)
     */
    @Override
    public void prepare(boolean isRecord) {
        try {
            mMediaPlayer.setDataSource(mContext, videoUrl);
            mMediaPlayer.setLooping(true);
            // mute when record
            if (isRecord) {
                mMediaPlayer.setVolume(0, 0);
            }
            mMediaPlayer.prepare();
        } catch (Exception e) {
            Log.e(TAG, "prepare: failed to set up MediaPlayer for " + videoUrl, e);
        }

        int textures[] = GlUtil.createTextureID(1);
        if (textures.length > 0) {
            mTextureId = textures[0];
            mSurfaceTexture = new SurfaceTexture(mTextureId);
            mSurfaceTexture.setOnFrameAvailableListener(this);
        }
        initShader();
    }

    /**
     * Compiles/links the vertex + fragment shaders, resolves attribute/uniform
     * locations and uploads the static quad geometry into direct buffers.
     */
    private void initShader() {
        String vertexShader = TextResourceReader.readTextFileFromResource(mContext, R.raw.video_vertex_shader);
        String fragmentShader = TextResourceReader.readTextFileFromResource(mContext, R.raw
                .video_normal_fragment_shader);

        // Compile both shaders and link them into a program.
        mProgram = GlUtil.createProgram(vertexShader, fragmentShader);
        if (mProgram == 0) {
            // Compilation/link failure — drawFrame() would render nothing; bail early.
            return;
        }
        mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        GlUtil.checkLocation(mPositionHandle, "vPosition");

        mTextureCoordinatorHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
        GlUtil.checkLocation(mTextureCoordinatorHandle, "inputTextureCoordinate");

        mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        GlUtil.checkLocation(mMVPMatrixHandle, "uMVPMatrix");

        mTextureHandle = GLES20.glGetUniformLocation(mProgram, "s_texture");
        GlUtil.checkLocation(mTextureHandle, "s_texture");

        mDrawListBuffer = ByteBuffer.allocateDirect(DRAW_ORDER.length * BYTES_PER_SHORT)
                .order(ByteOrder.nativeOrder()).asShortBuffer().put(DRAW_ORDER);
        mVertexBuffer = ByteBuffer.allocateDirect(mVertex.length * BYTES_PER_FLOAT)
                .order(ByteOrder.nativeOrder()).asFloatBuffer().put(mVertex);
        mUVTexVertexBuffer = ByteBuffer.allocateDirect(mUVTexVertex.length * BYTES_PER_FLOAT)
                .order(ByteOrder.nativeOrder()).asFloatBuffer().put(mUVTexVertex);

        // Rewind so GL reads from the start of each buffer.
        mUVTexVertexBuffer.position(0);
        mDrawListBuffer.position(0);
        mVertexBuffer.position(0);
        Matrix.setIdentityM(mMVP, 0);
    }

    @Override
    public void onSurfaceChanged(int width, int height) {
        this.width = width;
        this.height = height;
        GLES20.glViewport(0, 0, width, height);
        if (faceDetector != null) {
            faceDetector.setSizeChange(width, height);
        }
    }

    /**
     * Latches the most recent video frame into the OES texture and draws the
     * full-screen quad. Must be called on the GL thread.
     */
    @Override
    public void drawFrame() {
        if (mSurfaceTexture == null || mProgram == 0) {
            // Not prepared yet (or shader failed to build) — nothing to draw.
            return;
        }
        mSurfaceTexture.updateTexImage();
        GLES20.glUseProgram(mProgram);

        // The sampler uniform is set to 0 below, so bind on texture unit 0.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);

        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT, false, 0, mVertexBuffer);

        GLES20.glEnableVertexAttribArray(mTextureCoordinatorHandle);
        GLES20.glVertexAttribPointer(mTextureCoordinatorHandle, 2, GLES20.GL_FLOAT, false, 0, mUVTexVertexBuffer);

        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVP, 0);
        GLES20.glUniform1i(mTextureHandle, 0);

        GLES20.glDrawElements(GLES20.GL_TRIANGLES, DRAW_ORDER.length, GLES20.GL_UNSIGNED_SHORT, mDrawListBuffer);

        // Only attribute arrays may be disabled — the original code also passed the
        // uniform locations (mMVPMatrixHandle / mTextureHandle) here, which can
        // spuriously disable attribute indices 0/1 or raise GL errors.
        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTextureCoordinatorHandle);

        // detect faces and swap
//        detectFacesAndSwap();
    }


    /**
     * Detects face landmarks in the current GL frame and performs the face swap.
     * (Currently disabled; see the commented call in {@link #drawFrame()}.)
     */
    private void detectFacesAndSwap() {
        Bitmap frameBitmap = null;
        try {
            if (faceDetector == null) {
                faceDetector = new BitmapFaceDetector(FaceApplication.sContext, this.width, this.height);
            }

            frameBitmap = Uv21Helper.readGlFrameBitmap(width, height);
            byte[] uv21Data = Uv21Helper.convertToNV21(frameBitmap, width, height);
            Facepp.Face[] faces = faceDetector.detectFaces(uv21Data, Facepp.IMAGEMODE_NV21);
            // todo : do your work

        } catch (Exception e) {
            Log.e(TAG, "detectFacesAndSwap: frame capture/detection failed", e);
        } finally {
            if (frameBitmap != null) {
                frameBitmap.recycle();
            }
        }
    }


    @Override
    public void start() {
        if (mMediaPlayer != null && mSurfaceTexture != null) {
            Surface surface = new Surface(mSurfaceTexture);
            mMediaPlayer.setSurface(surface);
            // MediaPlayer keeps its own reference; release ours to avoid a
            // native Surface leak.
            surface.release();
            mMediaPlayer.start();
        }
    }

    @Override
    public void onPause() {
        if (mMediaPlayer != null) {
            mMediaPlayer.pause();
        }
    }

    @Override
    public void onResume() {
        if (mMediaPlayer != null) {
            mMediaPlayer.start();
        }
    }

    @Override
    public void onDestroy() {
        if (mMediaPlayer != null) {
            try {
                mMediaPlayer.stop();
                mMediaPlayer.release();
            } catch (Exception e) {
                // Player may already be in an invalid state; releasing is best-effort.
                Log.e(TAG, "onDestroy: error releasing MediaPlayer", e);
            }
            mMediaPlayer = null;
        }

        if (mSurfaceTexture != null) {
            // Free the native buffers backing the SurfaceTexture.
            mSurfaceTexture.release();
            mSurfaceTexture = null;
        }

        if (faceDetector != null) {
            faceDetector.release();
        }
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // No-op: drawFrame() is driven by the renderer's own frame loop.
    }
}
