package com.wisdom.smartbook.activity;

import android.annotation.SuppressLint;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import android.util.Size;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;

import androidx.annotation.NonNull;
import androidx.fragment.app.FragmentActivity;

import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.android.odml.image.MlImage;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.face.Face;
import com.google.mlkit.vision.face.FaceDetection;
import com.google.mlkit.vision.face.FaceDetector;
import com.google.mlkit.vision.face.FaceDetectorOptions;
import com.wisdom.smartbook.R;
import com.wisdom.smartbook.facelib.bean.ImageData;
import com.wisdom.smartbook.facelib.core.AndroidConfig;
import com.wisdom.smartbook.facelib.core.FaceCheckManager;
import com.wisdom.smartbook.facelib.encoder.EncoderBus;
import com.wisdom.smartbook.facelib.engine.ActionEngine;
import com.wisdom.smartbook.facelib.engine.CameraEngine;
import com.wisdom.smartbook.facelib.engine.CheckFaceEngine;
import com.wisdom.smartbook.facelib.engine.Const;
import com.wisdom.smartbook.facelib.utils.MyLogger;
import com.wisdom.smartbook.facelib.utils.NV21ToBitmap;
import com.wisdom.smartbook.facelib.utils.PermissionUtils;
import com.wisdom.smartbook.facelib.utils.SensorEventUtil;
import com.wisdom.smartbook.facelib.view.CircleImageView;
import com.wisdom.smartbook.facelib.view.FaceRectView;
import com.wisdom.smartbook.facelib.view.LegacyCameraConnectionFragment;
import com.wisdom.smartbook.facelib.view.OverlayView;
import com.wisdom.smartbook.utils.KLog;

import java.util.List;

import static com.google.mlkit.vision.face.FaceDetectorOptions.PERFORMANCE_MODE_FAST;

/**
 * Face-recognition screen: drives a legacy {@link Camera} preview, feeds NV21 frames into
 * the in-house {@link FaceCheckManager} liveness pipeline, and (optionally) runs ML Kit
 * face detection on converted bitmaps.
 *
 * <p>Threading model: camera preview callbacks arrive on the camera thread; background
 * work is posted to a {@link HandlerThread} ("inference") that lives between
 * {@link #onResume()} and {@link #onPause()}; {@code handler2} marshals captured face
 * bitmaps back for storage in {@link #bitmap}.
 */
public class FaceRecogMainActivity extends FragmentActivity implements
        Camera.PreviewCallback {
    private static final String TAG = "CameraActicity";
    // Camera preview width in pixels (set once the first frame arrives).
    public int previewWidth = 0;
    // Camera preview height in pixels.
    public int previewHeight = 0;
    // Display area width.
    public static float ScreenWidth;
    // Display area height.
    public static float ScreenHeight;
    public static int CameraId = 0;
    private boolean isProcessingFrame = false;
    // Whether the front-facing camera is in use.
    public static boolean is_front_camera = Const.IS_FRONT_CAMERA;
    private Handler handler;
    private HandlerThread handlerThread;
    protected SensorEventUtil sensorEventUtil;
    // Latest camera frame in NV21 format.
    protected byte[] mNV21Bytes;
    private Runnable postInferenceCallback;
    private Runnable imageConverter;
    public NV21ToBitmap nv21ToBitmap;
    private ActionEngine actionEngine;

    /***** UI widgets *****/
    private TextView pre_toast_text; // hint / toast text shown to the user
    private FaceRectView mFaceRectView;
    private FaceDetector detector;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Keep the screen on while the camera preview is active.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main_recog_face);
        toCheckPermission();

        // 1. Configure the ML Kit face detector (fast mode trades accuracy for latency).
        FaceDetectorOptions faceDetectorOptions = new FaceDetectorOptions.Builder()
                .setPerformanceMode(PERFORMANCE_MODE_FAST)
                .build();
        // 2. Obtain the face detector client.
        detector = FaceDetection.getClient(faceDetectorOptions);

        initRender();
    }

    private void toCheckPermission() {
        // init() runs only once the required runtime permissions are granted.
        PermissionUtils.checkPermission(this, () -> init());
    }

    @Override
    public void onRequestPermissionsResult(final int requestCode, @NonNull final String[] permissions,
                                           @NonNull final int[] grantResults) {
        // Let FragmentActivity dispatch the result to any fragments that requested permissions.
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == 1) {
            // FIX: the original indexed grantResults[1] after only checking length > 0,
            // risking ArrayIndexOutOfBoundsException when a single permission is returned.
            boolean allGranted = grantResults.length > 0;
            for (final int grant : grantResults) {
                if (grant != PackageManager.PERMISSION_GRANTED) {
                    allGranted = false;
                    break;
                }
            }
            if (allGranted) {
                init();
            } else {
                // Keep asking until the user grants the permissions the screen needs.
                toCheckPermission();
            }
        }
    }

    /** Wires up views, engines and the camera fragment. Called only after permissions are granted. */
    private void init() {
        mFaceRectView = findViewById(R.id.facerectview);
        pre_toast_text = findViewById(R.id.pre_toast_text);

        // Liveness-action results.
        actionEngine = ActionEngine.getInstance();
        actionEngine.setOnActionCallback(new ActionEngine.OnActionCallback() {
            @Override
            public void onMessage(String msg) {
                // NOTE(review): assumes this callback fires on the main thread — setText is
                // not safe off the UI thread; confirm against ActionEngine's implementation.
                pre_toast_text.setText(msg);
            }

            @Override
            public void onResult(boolean isOk, String msg) {
                if (isOk) {
                    // Liveness check passed.
                } else {
                    // Liveness check failed.
                }
            }
        });
        actionEngine.start();
        // Action (blink / mouth / head-pose) detection results.
        CheckFaceEngine.getInstance().setOnFaceCheckResult(type -> {
            switch (type) {
                case Const.ACTION_TYPE_EYE:
                case Const.ACTION_TYPE_MOUTH:
                case Const.ACTION_TYPE_LEFT_RIGHT_HEAD:
                case Const.ACTION_TYPE_RISE_HEAD:
                case Const.ACTION_TYPE_BOW_HEAD:
                    actionEngine.onResult(type);
                    break;
                case Const.ACTION_TYPE_NOT_ZHENGDUI:
                    pre_toast_text.setText("请正对屏幕!");
                    break;
                case Const.ACTION_TYPE_NO_FACE:
                    pre_toast_text.setText("保持面部在取景框内");
                    break;
                default:
                    // A face is present and correctly posed: grab a snapshot once.
                    if (bitmap == null) {
                        toGetImage();
                    }
                    break;
            }
        });
        nv21ToBitmap = new NV21ToBitmap(this);
        setFragment();
    }

    // RenderScript objects for the fast NV21 byte[] -> Bitmap conversion path.
    private RenderScript rs;
    private ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;
    private Type.Builder yuvType;
    private Type.Builder rgbaType;
    private Allocation in;
    private Allocation out;

    private void initRender() {
        rs = RenderScript.create(this);
        yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
    }

    /**
     * Converts a raw NV21 preview frame to an ARGB_8888 {@link Bitmap} via RenderScript.
     * Allocations are created lazily on the first call and reused afterwards, so this
     * assumes the preview size never changes between calls.
     */
    private Bitmap convertBitmap4RenderScript(byte[] data, Camera camera) {
        Camera.Size size = camera.getParameters().getPreviewSize();
        if (yuvType == null) {
            yuvType = new Type.Builder(rs, Element.U8(rs)).setX(data.length);
            in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);
        }

        if (rgbaType == null) {
            rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(size.width).setY(size.height);
            out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);
        }

        in.copyFrom(data);
        yuvToRgbIntrinsic.setInput(in);
        yuvToRgbIntrinsic.forEach(out);

        Bitmap bitmap = Bitmap.createBitmap(size.width, size.height, Bitmap.Config.ARGB_8888);
        out.copyTo(bitmap);

        return bitmap;
    }

    /** Runs ML Kit face detection on {@code bitmap}; results are only logged for now. */
    public void processMKLImage(Bitmap bitmap) {
        InputImage image = InputImage.fromBitmap(bitmap, 0);
        detector.process(image)
                .addOnSuccessListener(new OnSuccessListener<List<Face>>() {
                    @Override
                    public void onSuccess(List<Face> faces) {
                        KLog.e("processMKLImage", " processMKLImage  onSuccess: " + 1);
                    }
                })
                .addOnFailureListener(new OnFailureListener() {
                    @Override
                    public void onFailure(@NonNull Exception e) {
                        KLog.e("processMKLImage", "processMKLImage onFailure: " + 1);
                    }
                });
    }

    /**
     * Initializes the face-check pipeline once the preview size is known.
     * Must be called after {@link #previewWidth}/{@link #previewHeight} are set.
     */
    public void faceinit() {
        // FIX: an NV21 frame is Y (w*h) plus interleaved VU (w*h/2) = w*h*3/2 bytes;
        // the original allocated only w*h, which is too small for a full frame.
        mNV21Bytes = new byte[previewWidth * previewHeight * 3 / 2];
        FaceCheckManager.init(
                FaceRecogMainActivity.this,
                AndroidConfig
                        .create()
                        .setCameraMode()
                        .setDefaultFunc().openFunc(AndroidConfig.Func.Attribution)
                        .setDefaultInputImageFormat()
                        .setInputImageSize(previewWidth, previewHeight)
                        .setOutputImageSize((int) ScreenWidth, (int) ScreenHeight)
        );
        if (sensorEventUtil == null) {
            sensorEventUtil = new SensorEventUtil(this);
        }
    }

    long lastProcessTime = 0;

    @SuppressLint("NewApi")
    @Override
    public void onPreviewFrame(final byte[] bytes, final Camera camera) {
        // Drop frames while the previous one is still being processed.
        if (isProcessingFrame) {
            return;
        }
        MyLogger.logError(TAG, "onPreviewFrame: ");

        // NOTE(review): the frame-processing pipeline below is intentionally disabled;
        // kept for reference while the ML Kit path above is being evaluated.
//        if (System.currentTimeMillis() - lastProcessTime > 1000) {
//            Bitmap bitmap = convertBitmap4RenderScript(bytes, camera);
//            processMKLImage(bitmap);
//            lastProcessTime = System.currentTimeMillis();
//        }

//        isProcessingFrame = true;
//        try {
//            if (mNV21Bytes == null) {
//                Camera.Size previewSize = camera.getParameters().getPreviewSize();
//                previewHeight = previewSize.height;
//                previewWidth = previewSize.width;
//                faceinit();
//                EncoderBus.GetInstance().onSetFrameConfiguration(previewHeight, previewWidth);
//                trackingOverlay = findViewById(R.id.facing_overlay);
//                trackingOverlay.addCallback(canvas -> EncoderBus.GetInstance().onDraw(canvas));
//            }
//        } catch (final Exception e) {
//            MyLogger.logError(TAG, "onPreviewFrame: " + e);
//            return;
//        }
//        imageConverter = () -> mNV21Bytes = bytes;
//        postInferenceCallback = () -> {
//            camera.addCallbackBuffer(bytes);
//            isProcessingFrame = false;
//        };
//        processImage();
    }

    @Override
    public synchronized void onStart() {
        super.onStart();
    }

    @Override
    public synchronized void onResume() {
        super.onResume();
        // Background thread for inference work; torn down in onPause().
        handlerThread = new HandlerThread("inference");
        handlerThread.start();
        handler = new Handler(handlerThread.getLooper());
        processImage();
    }

    @Override
    public synchronized void onPause() {
        // FIX: guard against a null thread (onPause can run without a matching successful
        // onResume in edge cases), always clear the fields, and re-interrupt on
        // InterruptedException instead of swallowing it.
        if (handlerThread != null) {
            handlerThread.quitSafely();
            try {
                handlerThread.join();
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
                MyLogger.logError(TAG, "onPause: " + e);
            }
            handlerThread = null;
            handler = null;
        }
        super.onPause();
    }

    @Override
    public synchronized void onStop() {
        super.onStop();
    }

    @Override
    public synchronized void onDestroy() {
        super.onDestroy();
        // Release native resources.
        FaceCheckManager.release();
        // FIX: actionEngine is only assigned in init(); if permissions were never granted
        // the original dereferenced null here.
        if (actionEngine != null) {
            actionEngine.destory();
        }
        // FIX: the RenderScript context created in initRender() holds native memory and
        // was never released.
        if (rs != null) {
            rs.destroy();
            rs = null;
        }
        thread = null;
    }

    /** Installs the legacy camera preview fragment into the container. */
    protected void setFragment() {
        LegacyCameraConnectionFragment fragment = new LegacyCameraConnectionFragment(this,
                getLayoutId(), new Size(720, 720));
        CameraId = fragment.getCameraId();
        getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
    }

    /** Returns the current frame buffer to the camera and re-enables frame processing. */
    protected void readyForNextImage() {
        if (postInferenceCallback != null) {
            postInferenceCallback.run();
        }
    }

    protected synchronized void runInBackground(final Runnable r) {
        if (handler != null) {
            handler.post(r);
        }
    }

    // Copies the latest preview bytes into mNV21Bytes.
    protected void getCameraBytes() {
        if (imageConverter != null) {
            imageConverter.run();
        }
    }

    protected int getLayoutId() {
        return R.layout.camera_connection_fragment;
    }

    private OverlayView trackingOverlay;

    /** One iteration of the face-detection pipeline over the latest NV21 frame. */
    protected void processImage() {
        if (sensorEventUtil != null) {
            getCameraBytes();
            int degree = CameraEngine.getInstance().getCameraOrientation(sensorEventUtil.orientation);
            // Compensate for the camera sensor orientation before detection.
            FaceCheckManager.Camera.setRotation(degree - 90, false, (int) ScreenWidth, (int) ScreenHeight);

            // Run face detection on the current frame.
            FaceCheckManager.FaceDetect faceDetect = FaceCheckManager.detect(mNV21Bytes);
            if (faceDetect.getFaceCount() > 0) {
//                List<FaceLandmarkInfo> landmarkInfos = faceDetect.landmark2d();
//                CheckFaceEngine.getInstance().onCheckAction(landmarkInfos);
//                if (mFaceRectView != null) {
//                    mFaceRectView.clearFaceInfo();
//                }
//                mFaceRectView.addFaceInfo(faceDetect);
            } else {
                CheckFaceEngine.getInstance().noFace();
            }
        }
        runInBackground(() -> {
            readyForNextImage();
            if (trackingOverlay != null) {
                trackingOverlay.postInvalidate();
            }
        });
    }

    /**
     * Face snapshot capture state: isStart gates re-entry while a capture thread runs.
     */
    private boolean isStart = true;
    private Thread thread;

    /** Captures the current NV21 frame to a Bitmap on a worker thread (at most once). */
    private void toGetImage() {
        if (bitmap != null) {
            return;
        }
        if (!isStart) {
            return;
        }
        isStart = false;
        thread = new Thread(() -> {
            try {
                Bitmap bitmap = nv21ToBitmap.nv21ToBitmap(mNV21Bytes, previewWidth, previewHeight);
                ImageData imageData = new ImageData();
                imageData.setBitmap(bitmap);
                Message message = new Message();
                message.what = 1;
                message.obj = imageData;
                handler2.sendMessage(message);
            } catch (Exception e) {
                e.printStackTrace();
            }
            isStart = true;
        });
        thread.start();
    }

    // Snapshot of the detected face; null until capture completes.
    private Bitmap bitmap;
    // NOTE(review): anonymous inner Handler holds an implicit reference to the Activity
    // (potential leak if messages outlive it) and the no-arg Handler() constructor binds
    // to the creating thread's Looper — kept as-is to preserve behavior.
    private Handler handler2 = new Handler() {
        @Override
        public void handleMessage(Message message) {
            super.handleMessage(message);
            switch (message.what) {
                case 1:
                    ImageData imageData = (ImageData) message.obj;
                    bitmap = imageData.getBitmap();
                    break;
            }
        }
    };

}