package com.redrose.videodemo.view;

import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.view.MotionEvent;
import com.redrose.videodemo.camera.CameraUtil2;
import com.redrose.videodemo.camera.DirectDrawer;
import com.redrose.videodemo.camera.H264Encoder;
import com.redrose.videodemo.utils.BitmapCallBack;
import com.redrose.videodemo.utils.LogUtils;
import com.redrose.videodemo.utils.ToastUtil;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * Desc:
 *
 * @author: RedRose
 * Date: 2019/4/17
 * Email: yinsxi@163.com
 */

public class PreViewGlsurfaceView extends GLSurfaceView implements GLSurfaceView.Renderer,
        Camera.PictureCallback, Camera.PreviewCallback,
        SurfaceTexture.OnFrameAvailableListener {
    public static final String TAG = "PreViewGlsurfaceView";
    private Context mContext;
    /**
     * Captures frames from the camera image stream as an OpenGL ES texture
     * (the "texture layer").
     */
    SurfaceTexture mSurface;
    /**
     * OpenGL texture id backing {@link #mSurface}; -1 until the GL surface is created.
     */
    int mTextureID = -1;
    DirectDrawer mDirectDrawer;
    private Camera mCamera;
    private BitmapCallBack mBitmapCallback;
    // Encoder frame size. NOTE(review): assumed to match a preview size the
    // camera actually delivers — confirm against CameraUtil2's parameters.
    int width = 1920;
    int height = 1080;
    int framerate = 30; // 30 frames per second
    H264Encoder encoder; // custom H.264 encoding helper, created per recording

    /** Whether a recording session is currently active. */
    private boolean isStartVideo = false;

    public PreViewGlsurfaceView(Context context) {
        this(context, null);
    }

    public PreViewGlsurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.mContext = context;
        setEGLContextClientVersion(2);
        setRenderer(this);
        // RENDERMODE_WHEN_DIRTY: render only when requestRender() is called
        // (or on resume). RENDERMODE_CONTINUOUSLY would redraw permanently;
        // here rendering is driven on demand by onFrameAvailable().
        setRenderMode(RENDERMODE_WHEN_DIRTY);
        mCamera = CameraUtil2.getInstance().openCamera();
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        LogUtils.d(TAG, "--onSurfaceCreated--");
        // Create the external OES texture the camera will stream into.
        mTextureID = createTextureID();
        // Wrap the texture id in a SurfaceTexture ("texture layer").
        mSurface = new SurfaceTexture(mTextureID);
        // Get notified whenever a new camera frame lands on the texture.
        mSurface.setOnFrameAvailableListener(this);
        mDirectDrawer = new DirectDrawer(mTextureID);
        // Start the camera preview onto the texture layer.
        CameraUtil2.getInstance().doStartPreview(mSurface);
        mCamera.setPreviewCallback(this);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        LogUtils.d(TAG, "--onSurfaceChanged--");
        GLES20.glViewport(0, 0, width, height);
        // Called when the render surface size or orientation changes.
        // If the camera was released (e.g. after onPause), reopen it and
        // restart the preview on the existing texture layer.
        if (CameraUtil2.getInstance().getIsRelease()) {
            mCamera = CameraUtil2.getInstance().openCamera();
            CameraUtil2.getInstance().doStartPreview(mSurface);
            // FIX: the reopened Camera instance has no preview callback yet;
            // without re-registering, onPreviewFrame() — and thus recording —
            // would silently stop working after a pause/resume cycle.
            if (mCamera != null) {
                mCamera.setPreviewCallback(this);
            }
        }
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // Invoked by the system on every redraw of the GLSurfaceView;
        // performs the actual rendering work.
        LogUtils.d(TAG, "--onDrawFrame--");
        GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // Update the texture image to the most recent frame from the stream.
        mSurface.updateTexImage();
        mDirectDrawer.draw();
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        LogUtils.d(TAG, "--onFrameAvailable--");
        // A new camera frame is available on the texture layer:
        // ask the view to render it (we run in RENDERMODE_WHEN_DIRTY).
        this.requestRender();
    }

    /**
     * Generates and configures an external OES texture for the camera stream.
     *
     * @return the generated GL texture id
     */
    private int createTextureID() {
        int[] texture = new int[1];
        GLES20.glGenTextures(1, texture, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0]);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
        return texture[0];
    }

    @Override
    public void onPause() {
        super.onPause();
        LogUtils.d(TAG, "--onPause--");
        // FIX: detach the preview callback before releasing so a late frame
        // cannot be delivered into a released Camera.
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
        }
        // Stop the preview and release the camera.
        CameraUtil2.getInstance().releaseCamera();
    }

    /**
     * Registers the callback that receives the raw JPEG bytes of a captured photo.
     */
    public void setBitmapCallback(BitmapCallBack bitmapCallback) {
        this.mBitmapCallback = bitmapCallback;
    }

    /**
     * Focuses the camera and then takes a picture; the result is delivered
     * to {@link #onPictureTaken(byte[], Camera)}.
     */
    public void takePhoto() {
        if (mCamera == null) {
            return;
        }
        mCamera.autoFocus(new Camera.AutoFocusCallback() {
            @Override
            public void onAutoFocus(boolean success, Camera camera) {
                // Focus first, then shoot. Some devices always report focus
                // failure (success == false); the previous code never took a
                // picture in that case, leaving the shutter apparently dead.
                // FIX: take the picture regardless — a best-effort shot beats
                // no shot at all.
                camera.cancelAutoFocus();
                mCamera.takePicture(null, null, PreViewGlsurfaceView.this);
            }
        });
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Touch-to-focus.
        // FIX: guard against NPE when the camera has been released.
        if (mCamera != null) {
            mCamera.autoFocus(new Camera.AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                    if (success) {
                        ToastUtil.show(getContext(), "聚焦成功");
                        camera.cancelAutoFocus();
                    }
                }
            });
        }
        return super.onTouchEvent(event);
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Raw NV21 preview frame from the camera.
        LogUtils.d(TAG, "--onPreviewFrame--");
        if (encoder != null && isStartVideo) {
            // Hand the frame to the encoder ("putDate" is the encoder's
            // existing API name; a typo for putData we cannot rename here).
            encoder.putDate(data);
        }
    }

    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        if (mBitmapCallback != null) {
            mBitmapCallback.backByte(data);
        }
        // takePicture() stops the preview; restart it so the screen stays live.
        camera.startPreview();
    }

    /**
     * Starts an H.264 recording session. No-op if already recording.
     */
    public void startVideo() {
        if (isStartVideo) {
            // FIX: calling startVideo twice previously leaked the running
            // encoder by overwriting the reference without stopping it.
            return;
        }
        isStartVideo = true;
        ToastUtil.show(getContext(), "开始录像");
        encoder = new H264Encoder(width, height, framerate);
        encoder.startEncoder(); // begin encoding
    }

    /**
     * Stops the current recording session, if any.
     */
    public void stopVideo() {
        isStartVideo = false;
        ToastUtil.show(getContext(), "停止录像");
        if (encoder != null) {
            encoder.stopEncoder();
            // FIX: drop the stale encoder so onPreviewFrame cannot feed a
            // stopped instance and the object can be garbage-collected;
            // startVideo() always creates a fresh one.
            encoder = null;
        }
    }
}
