package com.bagus.mediapipefacedetection;


import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;

import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.SurfaceTexture;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.util.Size;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;

import com.bagus.mediapipefacedetection.constant.ErrorType;
import com.bagus.mediapipefacedetection.ctrl.FaceDetectCtrl;
import com.bagus.mediapipefacedetection.jni.FaceDetectJni;
import com.bagus.mediapipefacedetection.listener.ResultEntity;
import com.bagus.mediapipefacedetection.manager.FaceDetectManager;
import com.bagus.mediapipefacedetection.module.base.BaseActivity;
import com.bagus.mediapipefacedetection.module.base.BaseView;
import com.bagus.mediapipefacedetection.module.detect.DetectContract;
import com.bagus.mediapipefacedetection.module.detect.DetectPresenterNew;
import com.bagus.mediapipefacedetection.quality.DetectQualityStatus;
import com.bagus.mediapipefacedetection.utils.IMediaPlayer;
import com.google.mediapipe.components.CameraHelper;
import com.google.mediapipe.components.ExternalTextureConverter;
import com.google.mediapipe.components.FrameProcessor;
import com.google.mediapipe.components.PermissionHelper;
import com.google.mediapipe.formats.proto.LandmarkProto;
import com.google.mediapipe.framework.AndroidAssetUtil;
import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.PacketCallback;
import com.google.mediapipe.framework.PacketGetter;
import com.google.mediapipe.glutil.EglManager;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.MessageLite;

import java.io.File;
import java.lang.ref.WeakReference;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

//Code Sources:
//https://github.com/google/mediapipe/blob/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu/MainActivity.java
//https://github.com/google/mediapipe/blob/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/basic/MainActivity.java
//https://github.com/TheAdmiral95/IrisTracking

public class MainActivity extends BaseActivity<DetectPresenterNew> implements DetectContract.View {

    // MediaPipe graph stream / side-packet names. These must match the streams
    // declared inside iris_tracking_gpu.binarypb.
    private static final String FOCAL_LENGTH_STREAM_NAME = "focal_length_pixel";
    private static final String OUTPUT_LANDMARKS_STREAM_NAME = "face_landmarks_with_iris";
    private static final String BINARY_GRAPH_NAME = "iris_tracking_gpu.binarypb";
    private static final String INPUT_VIDEO_STREAM_NAME = "input_video";
    private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video";
    // Mutable: toggled by switchCamera(). Static so the choice survives re-creation.
    private static  CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT;
    private static final boolean FLIP_FRAMES_VERTICALLY = true;
    private SurfaceTexture surfaceTexture;
    private SurfaceView surfaceView;
    private Button captureImageButton;
    private TextView tvTip;
    private RelativeLayout ll_switch;
    private EglManager eglManager;
    private FrameProcessor frameProcessor;
    private ExternalTextureConverter externalTextureConverter;
    private CameraXPreviewHelper cameraXPreviewHelper;
    // Most recent landmark list delivered by the MediaPipe packet callback.
    private LandmarkProto.NormalizedLandmarkList currentLandmarks;
    // Snapshot of the landmarks taken at the moment the user pressed capture.
    private List<LandmarkProto.NormalizedLandmark> captureLandmarks;
    private boolean landmarksExist;
    // Focal-length side packet may only be set once per graph; guarded by this flag.
    private boolean haveSidePackets = false;
    private ImageCapture.OnImageSavedCallback imageSavedCallback;
    private ImageCapture.Builder imageCaptureBuilder;
    // NOTE(review): declared but never used in this file — confirm before removing.
    private final Size cameraResolution = new Size(2448, 3264);
    private String SAVE_FILE_DIR = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS).toString() + "/";
    private File fullImageFile;
    private String headImageFile;
    private IrisData irisData;
    private ProgressBar progress;

    static {
        // Native libraries: MediaPipe graph runtime, OpenCV, and the in-house face detector.
        System.loadLibrary("mediapipe_jni");
        System.loadLibrary("opencv_java3");
        System.loadLibrary("detectFace");
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
//        setContentView(R.layout.activity_main);
        // Overrides the Downloads-directory default above: captures are written
        // to the app cache instead of public external storage.
        SAVE_FILE_DIR = getCacheDir() + "/";
        fullImageFile = new File(SAVE_FILE_DIR + "full.jpg");
        headImageFile = SAVE_FILE_DIR + "head.jpg";

        landmarksExist = false;
        captureLandmarks = null;

        // Wire up UI controls (layout is inflated by BaseActivity via getLayoutResId()).
        captureImageButton = findViewById(R.id.capImage);
        captureImageButton.setOnClickListener(new ImageCaptureBtnHandler());
        tvTip = findViewById(R.id.results);
        ll_switch = findViewById(R.id.ll_switch);
        progress = findViewById(R.id.progress);
//        surfaceView = findViewById(R.id.surfaceView);

        imageCaptureBuilder = new ImageCapture.Builder();

        // Build the callback invoked by CameraX once a still capture is saved.
        generateImageSavedCallback();

        // Preview surface is created programmatically and attached to the layout.
        surfaceView = new SurfaceView(this);
        setupPreviewDisplayView();

        // Give the native MediaPipe runtime access to app assets (graph binary etc.).
        AndroidAssetUtil.initializeNativeAssetManager(this);

        // GL context shared between the texture converter and the graph.
        eglManager = new EglManager(null);
        frameProcessor = new FrameProcessor(this, eglManager.getNativeContext(), BINARY_GRAPH_NAME, INPUT_VIDEO_STREAM_NAME, OUTPUT_VIDEO_STREAM_NAME);
        frameProcessor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY);

        addingLandmarkPacketCallback();

        PermissionHelper.checkAndRequestCameraPermissions(this);

        // Switch between front and back cameras.
        ll_switch.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                switchCamera();
            }
        });
    }


    /**
     * Registers a callback on the graph's landmark output stream. Each packet is
     * decoded and forwarded to the presenter for face-quality checking.
     *
     * From: https://github.com/google/mediapipe/blob/master/mediapipe/examples/android/src/java/com/google/mediapipe/apps/iristrackinggpu/MainActivity.java
     */
    private void addingLandmarkPacketCallback() {
        getPresenter().startDetect2();
        frameProcessor.addPacketCallback(OUTPUT_LANDMARKS_STREAM_NAME,
                new PacketCallback() {
                    @Override
                    public void process(Packet packet) {
                        byte[] rawLandmarks = PacketGetter.getProtoBytes(packet);
                        try {
                            // Decode the serialized NormalizedLandmarkList.
                            currentLandmarks = LandmarkProto.NormalizedLandmarkList.parseFrom(rawLandmarks);

                            // Run face-quality detection against the preview dimensions.
                            getPresenter().doDetect2(currentLandmarks.getLandmarkList(), surfaceView.getWidth(), surfaceView.getHeight());

                            // parseFrom() never returns null — a successful parse
                            // means landmarks are available (the old null check was dead code).
                            landmarksExist = true;
                        } catch (InvalidProtocolBufferException e) {
                            // Was silently swallowed; log so corrupt packets are visible.
                            Log.e("MainActivity", "Failed to parse landmark packet", e);
                        }
                    }
                }
        );
    }

    /**
     * Called once the camera preview is running. Shows the initial voice tip,
     * makes the preview visible, and (exactly once) feeds the camera focal
     * length into the graph as a side packet.
     */
    private void onCameraStarted(SurfaceTexture surfaceTexture) {
        showTips(999); // 999 = initial "remove glasses / face the camera" prompt
        this.surfaceTexture = surfaceTexture;
        // Make the display view visible to start showing the preview. This triggers the
        // SurfaceHolder.Callback added to (the holder of) previewDisplayView.
        this.surfaceView.setVisibility(View.VISIBLE);

        // This method is called on every activity resume, but side packets may
        // only be set once per graph instance.
        if (!haveSidePackets) {
            float focalLength = cameraXPreviewHelper.getFocalLengthPixels();
            if (focalLength != Float.MIN_VALUE) { // MIN_VALUE = "unknown focal length"
                Packet focalLengthSidePacket = frameProcessor.getPacketCreator().createFloat32(focalLength);
                Map<String, Packet> inputSidePackets = new HashMap<>();
                inputSidePackets.put(FOCAL_LENGTH_STREAM_NAME, focalLengthSidePacket);
                frameProcessor.setInputSidePackets(inputSidePackets);
            }
            haveSidePackets = true;
        }
    }

    ViewGroup viewGroup;

    /**
     * Attaches the (initially hidden) SurfaceView to the preview container and
     * binds the graph's video output to its surface lifecycle.
     */
    private void setupPreviewDisplayView() {
        surfaceView.setVisibility(View.GONE);
        viewGroup = findViewById(R.id.preview_display_layout);
        viewGroup.addView(surfaceView);

        surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder holder) {
                frameProcessor.getVideoSurfaceOutput().setSurface(holder.getSurface());
            }

            @Override
            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                onPreviewDisplaySurfaceChanged(width, height);
            }

            @Override
            public void surfaceDestroyed(SurfaceHolder holder) {
                // Detach so the graph stops rendering into a dead surface.
                frameProcessor.getVideoSurfaceOutput().setSurface(null);
            }
        });
    }

    /**
     * Resizes the texture converter to match the view, swapping width/height
     * when the camera sensor is rotated relative to the display.
     */
    protected void onPreviewDisplaySurfaceChanged(int width, int height) {
        Size viewSize = new Size(width, height);
        Size displaySize = cameraXPreviewHelper.computeDisplaySizeFromViewSize(viewSize);
        boolean isCameraRotated = cameraXPreviewHelper.isCameraRotated();
        externalTextureConverter.setSurfaceTextureAndAttachToGLContext(
                surfaceTexture,
                isCameraRotated ? displaySize.getHeight() : displaySize.getWidth(),
                isCameraRotated ? displaySize.getWidth() : displaySize.getHeight());
    }

    @Override
    protected void onResume() {
        super.onResume();
        lastTipsResId = -1;
        // Must be recreated every resume, otherwise voice prompts stop working
        // after returning to this activity.
        mIMediaPlayer = new IMediaPlayer(this);

        // Converter is closed in onPause(), so rebuild it here.
        externalTextureConverter = new ExternalTextureConverter(eglManager.getContext());
        externalTextureConverter.setFlipY(FLIP_FRAMES_VERTICALLY);
        externalTextureConverter.setConsumer(frameProcessor);
        if (PermissionHelper.cameraPermissionsGranted(this)) {
            startCamera();
        }
    }

    /** Starts (or restarts) the CameraX preview with the current facing direction. */
    private void startCamera() {
        cameraXPreviewHelper = new CameraXPreviewHelper();
        cameraXPreviewHelper.setOnCameraStartedListener(
                surfaceTexture -> {
                    onCameraStarted(surfaceTexture);
                }
        );
        cameraXPreviewHelper.startCamera(this, imageCaptureBuilder, CAMERA_FACING, null, null);
    }

    /** Toggles front/back camera and rebuilds the preview surface. */
    private void switchCamera(){
        lastTipsResId = -1;
        if(cameraXPreviewHelper == null)
            return;
        if(CAMERA_FACING == CameraHelper.CameraFacing.FRONT){
            CAMERA_FACING = CameraHelper.CameraFacing.BACK;
        }else{
            CAMERA_FACING = CameraHelper.CameraFacing.FRONT;
        }
        cameraXPreviewHelper.startCamera(this, imageCaptureBuilder, CAMERA_FACING, null, null);
        // Old SurfaceView is tied to the previous camera session; replace it.
        viewGroup.removeAllViews();
        surfaceView = new SurfaceView(this);
        setupPreviewDisplayView();
    }

    @Override
    public SurfaceTexture getSurfaceTexture() {
        // Not used by this activity's detection flow; contract method stub.
        return null;
    }

    @Override
    public void initFinish() {
        // No-op: initialization completion is not surfaced in this UI.
    }

    /** Forwards a face-quality tip from the presenter (worker thread) to the UI handler. */
    @Override
    public void setUserTips(int tipsType) {
        Bundle data = new Bundle();
        data.putInt("tipsType", tipsType);
        sendHanderMessage(HANDLE_WHAT_SET_TIPS, data);
    }

    private MyHandler mHandler = null;
    private final static int HANDLE_WHAT_REDRAW_TEXTUREVIEW = 100;
    private final static int HANDLE_WHAT_SET_TIPS = 105;
    private final static int HANDLE_WHAT_INIT_FINISH = 113;
    private final static int HANDLE_WHAT_SHOW_STATICS = 116;

    /**
     * Static handler holding the activity via WeakReference to avoid leaking
     * it through the message queue.
     */
    private static class MyHandler extends Handler {
        private final WeakReference<MainActivity> mActivity;

        public MyHandler(MainActivity activity) {
            this.mActivity = new WeakReference<MainActivity>(activity);
        }

        @Override
        public void handleMessage(@NonNull Message msg) {
            MainActivity activity = mActivity.get();
            super.handleMessage(msg);
            if (activity != null) {
                switch (msg.what) {
                    case HANDLE_WHAT_REDRAW_TEXTUREVIEW:
                        Bundle data = msg.getData();
                        int cameraHeight = data.getInt("cameraHeight");
                        int cameraWidth = data.getInt("cameraWidth");
//                        activity.adjustTextureViewSize(cameraWidth, cameraHeight);
                        break;
                    case HANDLE_WHAT_SET_TIPS:
                        Bundle tipsData = msg.getData();
                        int tipsType = tipsData.getInt("tipsType");
                        activity.showTips(tipsType);
                        break;
                    case HANDLE_WHAT_INIT_FINISH:
//                        activity.getPresenter().startDetect();
                        break;
                    case HANDLE_WHAT_SHOW_STATICS:
//                        Bundle staticsData = msg.getData();
//                        String statics = staticsData.getString("statics");
//                        activity.tvStatics.setText(statics);
                        break;
                    default:
                        break;
                }
            }
        }
    }

    private int lastTipsResId = -1;
    private IMediaPlayer mIMediaPlayer;

    /**
     * Maps a DetectQualityStatus ordinal (or the magic value 999 for the
     * initial prompt) to a tip string + voice clip, updates the tip label, and
     * plays the clip without interrupting an in-flight one.
     *
     * Must run on the UI thread (dispatched via MyHandler).
     */
    private void showTips(int tips) {
        String statusTips = null;
        int tipsResId = -1;
        if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceNone.ordinal()) {
            return; // "none" status carries no user-visible tip
        }
        if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceNotFound.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_face_not_found);
            tipsResId = R.raw.tips_no_face;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceTooLarge.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_too_large);
            tipsResId = R.raw.tips_too_near;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceTooSmall.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_too_small);
            tipsResId = R.raw.tips_too_far;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceNeedLeft.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_need_left);
            tipsResId = R.raw.tips_need_left;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceNeedRight.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_need_right);
            tipsResId = R.raw.tips_need_right;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceNeedTop.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_need_top);
            tipsResId = R.raw.tips_need_top;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceNeedBottom.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_need_bottom);
            tipsResId = R.raw.tips_need_bottom;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceAlignCenter.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_align_center);
            tipsResId = R.raw.tips_align_center;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceKeepOutEye.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_keep_out_eye);
            tipsResId = R.raw.tips_keep_out_eye;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceKeepOutMouth.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_keep_out_mouth);
            tipsResId = R.raw.tips_keep_out_mouth;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceCloseEyes.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_close_eyes);
            tipsResId = R.raw.tips_close_eye;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceKeeping.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_keeping2);
            tipsResId = R.raw.tips_keeping;
        } else if (tips == DetectQualityStatus.MGFacePPSkinAICameraFaceFinish.ordinal()) {
            statusTips = getResources().getString(R.string.face_quality_tips_finish);
            tipsResId = R.raw.tips_well_done;
        } else if (tips == 999) {
            // Initial prompt shown when the camera starts ("remove glasses,
            // show forehead, look straight at the camera").
            statusTips = "请摘下眼镜，露出额头，平视摄像头";
            tipsResId = R.raw.skin_takeoff_glasses;
        }
        tvTip.setText(statusTips);
//        Log.e("yeqing",statusTips);
        if (lastTipsResId != tipsResId) {
            // Only play the next clip once the current one has finished, so
            // prompts are never cut off mid-sentence.
            if (lastTipsResId == -1) {
                mIMediaPlayer.doPlay(tipsResId);
            } else if (lastTipsResId == R.raw.skin_takeoff_glasses && mIMediaPlayer.isComplete()) {
                mIMediaPlayer.doPlay(tipsResId);
            }
            mIMediaPlayer.setOnCompletionListener(tipsResId, true);
        }
        lastTipsResId = tipsResId;
    }

    /** Posts a message with the given what/data to the UI handler. */
    private void sendHanderMessage(int what, Bundle data) {
        // Message.obtain() recycles from the global message pool instead of
        // allocating a new instance (Android SDK recommendation).
        Message msg = Message.obtain();
        msg.what = what;
        msg.setData(data);
        mHandler.sendMessage(msg);
    }

    @Override
    public void showStatics(String statics) {
        // No-op: statistics display is disabled in this build.
    }

    @Override
    public void showLoadingProgress() {
        // No-op: loading indicator is driven directly by SavePicTask instead.
    }

    @Override
    public void cameraOpened() {
        // No-op: camera startup is handled by onCameraStarted().
    }

    @Override
    public void detectFinish(ErrorType errorType, ResultEntity result) {
        // No-op: contract method stub.
    }

    @Override
    public void onExit(ErrorType errorType, ResultEntity result) {
        // No-op: contract method stub.
    }


    /**
     * Capture-button handler: if landmarks are currently detected, snapshots
     * them, kicks off the still capture, and stops detection; otherwise tells
     * the user no landmarks were found.
     */
    private class ImageCaptureBtnHandler implements View.OnClickListener {
        @Override
        public void onClick(View view) {

            String outputFilename = "output";

            if (landmarksExist) {
//                Toast.makeText(view.getContext(), "Iris Landmarks Found: Captured Image", Toast.LENGTH_SHORT).show();
                // "Processing image, please wait..."
                Toast.makeText(MainActivity.this,"图片处理中，请稍后。。。。。", Toast.LENGTH_SHORT).show();
                // Snapshot the landmarks at capture time.
                captureLandmarks = currentLandmarks.getLandmarkList();

//                double[] pointsArray = new double[captureLandmarks.size() * 3];
//                for (int i = 0; i < captureLandmarks.size(); i++) {
//                    pointsArray[3 * i] = captureLandmarks.get(i).getX();
//                    pointsArray[3 * i + 1] = captureLandmarks.get(i).getY();
//                    pointsArray[3 * i + 2] = captureLandmarks.get(i).getZ();
//                }
//                FaceDetectJni faceDetectJni = new FaceDetectJni();
//                String result = faceDetectJni.startDetectFace(pointsArray);
//                double[] heads = faceDetectJni.detectFinish(pointsArray);
//                float[] infos = faceDetectJni.nativeStartDetect(pointsArray,surfaceView.getWidth(),surfaceView.getHeight());
//                Log.d("yeqing", result);
//                Log.d("yeqing","头部信息："+Arrays.toString(heads));
//
//                FaceDetectCtrl faceDetectCtrl = new FaceDetectCtrl();
//                faceDetectCtrl.detect(0,captureLandmarks,surfaceView.getWidth(),surfaceView.getHeight());
//                Log.d("yeqing","人脸检测信息："+ Arrays.toString(infos));

                // Capture the current image off the UI thread.
                new TakePicTask().execute();
//                cameraXPreviewHelper.takePicture(fullImageFile, imageSavedCallback);
                getPresenter().stopDetect();
            } else {
                // Notify the user that capture is not possible yet.
                Toast.makeText(view.getContext(), "No Landmarks Found", Toast.LENGTH_SHORT).show();
            }
        }
    }



    /**
     * Builds the CameraX save callback: on success, shows the progress bar and
     * starts background post-processing of the captured frame.
     */
    private void generateImageSavedCallback() {
        imageSavedCallback = new ImageCapture.OnImageSavedCallback() {
            @Override
            public void onImageSaved(@NonNull ImageCapture.OutputFileResults outputFileResults) {
//                Bitmap bmp = BitmapFactory.decodeFile(fullImageFile.getPath());
//                irisData = new IrisData(captureLandmarks, bmp);
//                irisData.generateIrisImages(SAVE_FILE_DIR + "LeftIris.jpg", SAVE_FILE_DIR + "RightIris.jpg");
//                fullImageFile.delete();

                // This callback may arrive on a CameraX executor thread; post
                // the UI update onto the main thread via a view.
                ll_switch.post(new Runnable() {
                    @Override
                    public void run() {
                        progress.setVisibility(View.VISIBLE);
                    }
                });

                new SavePicTask().execute();
            }

            @Override
            public void onError(@NonNull ImageCaptureException exception) {
                // NOTE(review): capture failures are silently ignored — the
                // progress UI never appears and the user gets no feedback.
            }
        };
    }

    /** Triggers the still capture off the UI thread, then toasts "拍摄完成" (capture done). */
    private class TakePicTask extends AsyncTask<Void, Void, String> {

        @Override
        protected void onPreExecute() {

        }

        @Override
        protected String doInBackground(Void... params) {
            cameraXPreviewHelper.takePicture(fullImageFile, imageSavedCallback);
            return "";
        }

        @Override
        protected void onPostExecute(String result) {
            super.onPostExecute(result);
            // "Capture finished"
            Toast.makeText(MainActivity.this,"拍摄完成", Toast.LENGTH_SHORT).show();
        }
    }

    /**
     * Post-processes the saved full image into the head crop on a background
     * thread, then hides the progress bar and finishes the activity.
     */
    private class SavePicTask extends AsyncTask<Void, Void, String> {

        @Override
        protected void onPreExecute() {

        }

        @Override
        protected String doInBackground(Void... params) {
            getPresenter().handePictureResult2(fullImageFile.getAbsolutePath(),headImageFile);
            return "";
        }

        @Override
        protected void onPostExecute(String result) {
            super.onPostExecute(result);
            progress.setVisibility(View.GONE);
            // NOTE(review): magic number 13 — presumably the ordinal of the
            // "finish" quality status; confirm against DetectQualityStatus.
            showTips(13);
            // "Image processing finished"
            Toast.makeText(MainActivity.this,"图片处理完成", Toast.LENGTH_SHORT).show();
            finish();
        }
    }

    @Override
    public void finish() {
        // Package the output file paths before closing.
        Intent intent = new Intent();
        Bundle bundle = new Bundle();
        bundle.putString("full",fullImageFile.getAbsolutePath());
        bundle.putString("head",headImageFile);
        intent.putExtra("bundle",bundle);
        // NOTE(review): setIntent() only replaces THIS activity's intent; it
        // does not deliver a result to the caller. If the caller expects these
        // paths via onActivityResult, this should be setResult(RESULT_OK, intent).
        setIntent(intent);
        super.finish();
    }

    // NOTE(review): misnomer — returns the live landmark list, not a copy.
    // Currently unused within this class.
    private List<LandmarkProto.NormalizedLandmark> copyLandmarks(LandmarkProto.NormalizedLandmarkList source) {
        return source.getLandmarkList();
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (mIMediaPlayer != null) {
            mIMediaPlayer.close();
        }
        getPresenter().stopDetect();
        // Converter is rebuilt in onResume(); close it to release GL resources.
        externalTextureConverter.close();
        surfaceView.setVisibility(View.GONE);
    }

    @Override
    protected void initView() {
        mHandler = new MyHandler(this);
    }

    @Override
    protected int getLayoutResId() {
        return R.layout.activity_main;
    }

    @Override
    protected void initData() {
        // No-op: nothing to preload for this screen.
    }

    @Override
    protected DetectPresenterNew createPresenter() {
        return new DetectPresenterNew();
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults);
    }
}