package com.hlm.st_liveness_detector;

import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Rect;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Environment;
import android.os.SystemClock;
import android.util.Log;
import android.view.View;
import android.widget.Toast;

import com.hlm.st_liveness_detector.view.SimpleImageStore;
import com.sensetime.senseid.sdk.liveness.silent.FaceOcclusion;
import com.sensetime.senseid.sdk.liveness.silent.OnLivenessListener;
import com.sensetime.senseid.sdk.liveness.silent.SilentLivenessApi;
import com.sensetime.senseid.sdk.liveness.silent.common.type.PixelFormat;
import com.sensetime.senseid.sdk.liveness.silent.common.type.ResultCode;
import com.sensetime.senseid.sdk.liveness.silent.common.type.Size;
import com.sensetime.senseid.sdk.liveness.silent.common.util.FileUtil;
import com.sensetime.senseid.sdk.liveness.silent.type.FaceDistance;
import com.sensetime.senseid.sdk.liveness.silent.type.FacePosition;
import com.sensetime.senseid.sdk.liveness.silent.type.LightIntensity;
import com.sensetime.senseid.sdk.liveness.silent.type.OcclusionStatus;

import java.io.File;
import java.io.FileOutputStream;
import java.util.List;

public class SilentLivenessActivity extends AbstractSilentLivenessActivity {

    /**
     * Callbacks from the silent-liveness SDK: engine initialization, per-frame
     * face status for user guidance, and the final success/failure verdict.
     */
    private final OnLivenessListener mLivenessListener = new OnLivenessListener() {
        // elapsedRealtime() of the last tips update; throttles non-NORMAL
        // status text to at most one change per 300 ms.
        private long lastStatusUpdateTime;

        @Override
        public void onInitialized() {
            SilentLivenessApi.start();
            mStartInputData = true;
            // Quality filters only take effect when the quality model was
            // passed to init() (see onPostCreate).
            SilentLivenessApi.setBlurryFilterEnable(true, 1.4F);
            SilentLivenessApi.setIlluminationFilterEnable(true, 1.899F, 4.997F);
            /*SilentLivenessApi.setEyeOpenThreshold(0.47F);*/
        }

        @Override
        public void onFaceStatusChanged(final int facePosition, final FaceOcclusion faceOcclusion,
                final int faceDistance, final int lightIntensity) {
            // Skip rapid-fire updates unless the face just became NORMAL.
            if (SystemClock.elapsedRealtime() - this.lastStatusUpdateTime < 300
                    && facePosition != FacePosition.NORMAL) {
                return;
            }
            // Branch order defines message priority: missing face, distance,
            // bounds, occlusion, lighting, then the default "detecting" hint.
            if (facePosition == FacePosition.NO_FACE) {
                mTipsView.setText(R.string.common_tracking_missed);
            } else if (faceDistance == FaceDistance.TOO_CLOSE) {
                mTipsView.setText(R.string.common_face_too_close);
            } else if (facePosition == FacePosition.OUT_OF_BOUND) {
                mTipsView.setText(R.string.common_tracking_out_of_bound);
            } else if (faceOcclusion != null && faceOcclusion.isOcclusion()) {
                // List every covered region, separated by an ideographic comma.
                final StringBuilder covered = new StringBuilder();
                boolean first = true;
                if (faceOcclusion.getBrowOcclusionStatus() == OcclusionStatus.OCCLUSION) {
                    covered.append(getString(R.string.common_tracking_covered_brow));
                    first = false;
                }
                if (faceOcclusion.getEyeOcclusionStatus() == OcclusionStatus.OCCLUSION) {
                    if (!first) {
                        covered.append("、");
                    }
                    covered.append(getString(R.string.common_tracking_covered_eye));
                    first = false;
                }
                if (faceOcclusion.getNoseOcclusionStatus() == OcclusionStatus.OCCLUSION) {
                    if (!first) {
                        covered.append("、");
                    }
                    covered.append(getString(R.string.common_tracking_covered_nose));
                    first = false;
                }
                if (faceOcclusion.getMouthOcclusionStatus() == OcclusionStatus.OCCLUSION) {
                    if (!first) {
                        covered.append("、");
                    }
                    covered.append(getString(R.string.common_tracking_covered_mouth));
                }
                mTipsView.setText(
                        getString(R.string.common_tracking_covered, covered.toString()));
            } else if (lightIntensity == LightIntensity.TOO_DARK) {
                mTipsView.setText(R.string.common_light_too_dark);
            } else if (lightIntensity == LightIntensity.TOO_BRIGHT) {
                mTipsView.setText(R.string.common_light_too_bright);
            } else if (faceDistance == FaceDistance.TOO_FAR) {
                mTipsView.setText(R.string.common_face_too_far);
            } else {
                mTipsView.setText(R.string.common_detecting);
            }
            this.lastStatusUpdateTime = SystemClock.elapsedRealtime();
        }

        @Override
        public void onFailure(ResultCode resultCode, byte[] protobufData, List imageData,
                List<Rect> faceRects) {
            mStartInputData = false;
            mIsCanceled = false;

            final Intent result = new Intent();
            result.putExtra(RESULT_SDK_ERROR_CODE, resultCode.name());
            if (resultCode == ResultCode.STID_E_TIMEOUT
                    || resultCode == ResultCode.STID_E_HACK
                    || resultCode == ResultCode.STID_E_DETECT_FAIL) {
                // Detection-level failures are handed back to the caller.
                // (Alternative flow: call reBegin(resultCode) here and return
                // to retry in place instead of finishing.)
                result.putExtra(RESULT_INFO, getErrorNotice(resultCode));
                result.putExtra(RESULT_DEAL_ERROR_INNER, false);
            } else {
                // Environment/configuration errors: surface the notice here too.
                result.putExtra(RESULT_DEAL_ERROR_INNER, true);
                showError(getErrorNotice(resultCode));
            }
            setResult(ActivityUtils.convertResultCode(resultCode), result);
            finish();
        }

        @Override
        @SuppressWarnings("unchecked") // SDK declares the image list raw; it carries byte[] frames.
        public void onSuccess(byte[] protobufData, List imageData, List<Rect> faceRects) {
            mStartInputData = false;
            mIsCanceled = false;

            final Intent result = new Intent();
            // NOTE(review): the first frame travels via an Intent extra; very
            // large images risk TransactionTooLargeException — confirm size.
            final List<byte[]> frames = (List<byte[]>) imageData;
            if (frames != null && !frames.isEmpty()) {
                result.putExtra("byte", frames.get(0));
            }
            setResult(RESULT_OK, result);
            finish();
        }
    };

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);

        final File modelDir = new File(getFilesDir(), "assets");

        // Quality checking requires BOTH the quality model at init time AND the
        // explicit set*FilterEnable calls made in onInitialized().
        SilentLivenessApi.init(SilentLivenessActivity.this,
                modelPath(modelDir, LICENSE_FILE_NAME),
                modelPath(modelDir, DETECTION_MODEL_FILE_NAME),
                modelPath(modelDir, ALIGNMENT_MODEL_FILE_NAME),
                modelPath(modelDir, QUALITY_MODEL_FILE_NAME),
                modelPath(modelDir, FRAME_SELECTOR_MODEL_FILE_NAME),
                modelPath(modelDir, ANTI_SPOOFING_MODEL_FILE_NAME),
                mLivenessListener);

        SilentLivenessApi.setFaceDistanceRate(0.4F, 0.8F);
    }

    /** Absolute path of a model/license file inside the given directory. */
    private static String modelPath(final File dir, final String name) {
        return new File(dir, name).getAbsolutePath();
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (!mStartInputData) {
            return;
        }
        // Forward the NV21 preview frame, cropped to the mask overlay region.
        final Size previewSize = new Size(
                mSenseCamera.getPreviewSize().getWidth(),
                mSenseCamera.getPreviewSize().getHeight());
        SilentLivenessApi.inputData(data, PixelFormat.NV21, previewSize,
                mCameraPreviewView.convertViewRectToPicture(mOverlayView.getMaskBounds()),
                true, mSenseCamera.getRotationDegrees());
    }

    @Override
    void reBegin(ResultCode resultCode) {
        if (mLoadingView != null) {
            mLoadingView.clearAnimation();
            mLoadingView.setVisibility(View.GONE);
        }
        // Restart the detection session in place rather than finishing.
        SilentLivenessApi.stop();
        mStartInputData = true;
        mIsCanceled = true;
        SilentLivenessApi.start();
        mTipsView.setText(null);
        final Object reason = resultCode == null ? "" : resultCode;
        Toast.makeText(this, getString(R.string.common_silent_detect_again, reason),
                Toast.LENGTH_SHORT).show();
    }
}
