package win.smartown.android.library.faceEffects;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.hardware.Camera;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.text.SpannableStringBuilder;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;

import androidx.annotation.RequiresApi;

import com.blankj.utilcode.util.SPUtils;
import com.kaopiz.kprogresshud.KProgressHUD;
import com.orhanobut.dialogplus.DialogPlus;
import com.orhanobut.dialogplus.OnCancelListener;
import com.orhanobut.dialogplus.OnItemClickListener;
import com.orhanobut.dialogplus.ViewHolder;
import com.tenginekit.AndroidConfig;
import com.tenginekit.KitCore;
import com.tenginekit.model.TenginekitPoint;

import java.io.File;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;

import win.smartown.android.library.faceEffects.detection.BitmapUtils;
import win.smartown.android.library.faceEffects.detection.CommonShareBitmap;
import win.smartown.android.library.faceEffects.detection.FaceDetect;
import win.smartown.android.library.faceEffects.detection.FaceDetectResult;
import win.smartown.android.library.faceEffects.detection.MakeupBeautyUtils;
import win.smartown.android.library.faceEffects.detection.ScanResult;

import static win.smartown.android.library.faceEffects.SmallFaceUtils.smallFaceMesh;
import static win.smartown.android.library.faceEffects.detection.MagnifyEyeUtils.magnifyEye;

/**
 * Created by h2008 on 2021/5/12 11:46.
 * <br>
 * Desc:
 * <br>
 * 拍照界面
 */
public class CameraActionDetectActivity3 extends Activity implements View.OnClickListener {

    //主线程消息
    public final static int SCAN_RESULT = 1001;
    public final static int SCAN_ERROR = 1002;
    public final static int FRONT_RECOGNIZEOVER = 1004;
    public final static int FIRSTTIMEOUT = 1005;

    //子线程消息
    public final static int RECOGNIZE_BITMAP = 1003;
    public final static int RECOGNIZE_STREAM = 1006;

    private HandlerThread recognizeThread;
    private Handler recognizeHandler;

    private CameraPreview1 cameraPreview1;
    private ImageView cropView;
    private Bitmap temp;
    private TextView tv_tip;

    FaceDetect faceDetect;
    private Camera.Size size;
//    private NV21ToBitmap nv21tool;

    KProgressHUD hud;
    OkHttpUpUtil okHttpUpUtil = new OkHttpUpUtil();
    String dir = Environment.getExternalStorageDirectory().getAbsolutePath() + "/ocr/";

    //图片压缩率
    private int bmpqulity = 50;

    private String IDCARD = "";//测试用身份证ID
    boolean isFrontRecognizeSuccess = false;
    boolean isBackRecognizeSuccess = false;
    private static final int TIME_OUT = 1000000;//超时时间（毫秒）；当前值为1000s，与原注释“10s”不符

    Timer mTimer;
    String actionOfClass; // 动作类型，2001：人脸向右转，2002：人脸向左转，2003:眨眼。

    //原始分辨率
    private float screenwidth = 1080;

    private float screenheight = 1920;

    private float bmapscale = 1.0f;
    //绘制五点用
    private float boxleft = 0;
    private float boxright = 0;
    private float boxtop = 0;
    private float boxbottom = 0;

    private float faceboxheight = 0;
    private float faceboxwidth = 0;

    //画出人脸位置的框体
    //上下边距
    //左右边距
    private float updis = 0.16f;
    private float leftdis = 0.12f;
    private float dboxleft;
    private float dboxright;
    private float dboxtop;
    private float dboxbottom;

    public List<TenginekitPoint> landmarksInfos;


    // nv21 data from camera
    protected byte[] mNV21Bytes;
    // camera preview width
    protected int previewWidth;
    // camera preview height
    protected int previewHeight;
    // content display screen width
    public static int ScreenWidth;
    // content display screen height
    public static int ScreenHeight;

    boolean facePosition = false; // 人脸是否在框里面


    //是否完成上传 0未上传 1上传中 2上传完成
    int isUploadImageComplete = 0;
    //是否在前端进行检测图片
    boolean isFronthandleImage = false;
    //是否在超时
    boolean isTimeOut = false;
    //是否从第一帧开始
    boolean isFirstFrameStart = true;

    boolean isPause = false;
    SpannableStringBuilder ssb = new SpannableStringBuilder();

    private ImageView imageView;
    private MakeupBeautyUtils makeupBeautyUtils;
    private Handler handler = new Handler(new Handler.Callback() {
        @Override
        public boolean handleMessage(Message msg) {
            if(msg.obj instanceof Bitmap){

                imageView.setImageBitmap((Bitmap) msg.obj);
            }
            return false;
        }
    });

    // 加入连续出矩形框的次数
    private int faceBoxOut;


    /**
     * Cancels any previously scheduled timeout timer, then schedules
     * {@code task} to fire exactly once after {@link #TIME_OUT} milliseconds.
     */
    private void startTimer(TimerTask task) {
        Timer previous = mTimer;
        if (previous != null) {
            previous.cancel();
        }
        Timer fresh = new Timer();
        fresh.schedule(task, TIME_OUT);
        mTimer = fresh;
    }


    /**
     * Replaces the current tip text with {@code tip}. Safe to call from any
     * thread: the view update is always posted to the UI thread.
     */
    private void coverTip(final String tip) {
        final Runnable applyTip = new Runnable() {
            @Override
            public void run() {
                tv_tip.setText(tip);
            }
        };
        runOnUiThread(applyTip);
    }


    /**
     * Shows the "action detection timed out, restarting" tip and then blocks
     * the <em>calling</em> thread for {@code ms} milliseconds.
     *
     * <p>Fixes over the original: {@code Thread.sleep} and
     * {@code Thread.currentThread()} were invoked through the
     * {@code recognizeThread} instance, which was misleading — a static call
     * always sleeps the calling thread, never a specific other thread. The
     * tip was also set directly on {@code tv_tip} from whatever thread called
     * this method; Android views may only be touched from the UI thread, so
     * the update now goes through {@link #coverTip(String)}. Finally, the
     * interrupt flag is restored instead of being swallowed.
     *
     * @param ms pause duration in milliseconds
     */
    private void delay(int ms) {
        coverTip("动作检测超时，重新开始动作检测");
        try {
            Thread.sleep(ms);
        } catch (InterruptedException e) {
            // Re-interrupt so callers can still observe the interruption.
            Thread.currentThread().interrupt();
        }
    }


    //设置启动第一帧
    /**
     * Resets all per-run detection state so the next preview frame is treated
     * as the first frame of a brand-new detection pass, and cancels any
     * pending timeout timer.
     */
    public void ReStartFrame() {
        timefinish = true;               // allow the countdown to start again
        isFirstFrameStart = true;        // next frame re-enters the "start" path
        isFrontRecognizeSuccess = false;
        isBackRecognizeSuccess = false;
        isUploadImageComplete = 0;       // 0 = not uploaded
        isTimeOut = false;
        Timer pending = mTimer;
        if (pending != null) {
            pending.cancel();
        }
    }


    /**
     * Renders the detection overlay into a fresh bitmap and shows it in
     * {@link #cropView}: a red rectangle around the detected face
     * ({@code boxleft}/{@code boxtop}/{@code boxright}/{@code boxbottom},
     * already mirrored for the front camera), red dots for landmark points
     * 39–52, and a green guide rectangle inset by {@link #leftdis} /
     * {@link #updis} that the user's face should stay inside.
     *
     * <p>Fixes over the original: the hard-coded landmark range is now
     * guarded against a null or too-short {@link #landmarksInfos} (previously
     * an unconditional {@code get(i)} could throw), duplicated paint setup is
     * factored into {@link #newStrokePaint(int)}, and stray double semicolons
     * plus comments that contradicted the code (e.g. "5px" for a 1px stroke)
     * are gone.
     */
    public void DrawFaceBox() {
        final float fw = screenwidth * bmapscale;
        final float fh = screenheight * bmapscale;
        Bitmap overlay = Bitmap.createBitmap((int) fw, (int) fh, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(overlay);

        // Red 1px stroke: face bounding box.
        Paint facePaint = newStrokePaint(Color.RED);
        canvas.drawLine(boxleft, boxtop, boxright, boxtop, facePaint);
        canvas.drawLine(boxleft, boxtop, boxleft, boxbottom, facePaint);
        canvas.drawLine(boxleft, boxbottom, boxright, boxbottom, facePaint);
        canvas.drawLine(boxright, boxtop, boxright, boxbottom, facePaint);

        // Filled red paint for the landmark dots (15px diameter points).
        Paint dotPaint = new Paint();
        dotPaint.setColor(Color.RED);
        dotPaint.setStyle(Paint.Style.FILL);
        dotPaint.setStrokeWidth(15);

        // Landmarks 39..52 only — presumably a specific facial region in the
        // TengineKit layout; TODO confirm against the SDK's landmark map.
        if (landmarksInfos != null && landmarksInfos.size() >= 53) {
            for (int i = 39; i < 53; ++i) {
                TenginekitPoint p = landmarksInfos.get(i);
                // X is mirrored because the front-camera preview is flipped.
                canvas.drawPoint(fw - p.X, p.Y, dotPaint);
                System.out.println("landmarksInfos:" + p.X + ", " + p.Y);
            }
        }

        // Green 1px stroke: fixed guide rectangle the face must stay inside.
        Paint guidePaint = newStrokePaint(Color.GREEN);
        dboxleft = fw * leftdis;
        dboxright = fw * (1 - leftdis);
        dboxtop = fh * updis;
        dboxbottom = fh * (1 - updis);
        canvas.drawLine(dboxleft, dboxtop, dboxright, dboxtop, guidePaint);
        canvas.drawLine(dboxleft, dboxtop, dboxleft, dboxbottom, guidePaint);
        canvas.drawLine(dboxleft, dboxbottom, dboxright, dboxbottom, guidePaint);
        canvas.drawLine(dboxright, dboxtop, dboxright, dboxbottom, guidePaint);

        cropView.setImageBitmap(overlay);
        cropView.invalidate();
    }

    /** Builds a 1px anti-aliased, dithered, round-join/round-cap stroke paint. */
    private static Paint newStrokePaint(int color) {
        Paint p = new Paint(Paint.DITHER_FLAG);
        p.setStyle(Paint.Style.STROKE);
        p.setStrokeWidth(1);
        p.setColor(color);
        p.setAntiAlias(true);
        p.setDither(true);
        p.setStrokeJoin(Paint.Join.ROUND);
        p.setStrokeCap(Paint.Cap.ROUND);
        return p;
    }


//    @Override
//    protected void onCreate(Bundle savedInstanceState) {
//        super.onCreate(savedInstanceState);
//
//
//        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
//        setContentView(R.layout.activity_camera_bank_card);
////        setContentView(R.layout.activity_camera_new);
//
//        bankCardDetection = BankCardDetection1.create(getAssets());
//
//        // OpenCV 的初始化导入
////        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION, this, mLoaderCallback);
////        OpenCVNativeLoader loader = new OpenCVNativeLoader();
////        loader.init();
//
//        initProgress();
//
//        cameraPreview = (CameraPreview) findViewById(R.id.camera_surface);
//
//        tv_tip = findViewById(R.id.tv_tip);
//        //获取屏幕最小边，设置为cameraPreview较窄的一边
//        float screenMinSize = Math.min(getResources().getDisplayMetrics().widthPixels, getResources().getDisplayMetrics().heightPixels);
//        //根据screenMinSize，计算出cameraPreview的较宽的一边，长宽比为标准的16:9
//        float maxSize = screenMinSize / 9.0f * 16.0f;
//        RelativeLayout.LayoutParams layoutParams;
//
////        layoutParams = new RelativeLayout.LayoutParams((int) screenMinSize, (int) maxSize);
//        layoutParams = new RelativeLayout.LayoutParams((int) maxSize, (int) screenMinSize);
//        layoutParams.addRule(RelativeLayout.CENTER_IN_PARENT);
//        cameraPreview.setLayoutParams(layoutParams);
//        containerInnerView = findViewById(R.id.camera_crop_container_inner);
//        cropView = (ImageView) findViewById(R.id.camera_crop);
//
//        cropView0 = (ImageView) findViewById(R.id.camera_crop0);
//
//        cameraPreview.setOnClickListener(this);
//        cameraPreview.setOnFrameListener(new CameraPreview.onFrame() {
//            @RequiresApi(api = Build.VERSION_CODES.N)
//            @Override
//            public void onBitmap(final Bitmap bitmap)  {
//                if (ignorenum > 0) {
//                    // tv_tip.setText("倒数中");
//                    ignorenum--;
//                    if (ignorenum == 0) {
//                        isignoreend = true;
//                        //执行一次对焦
//                        CamAutoFocus();
//                    }
//                }
//                //对焦优化
//                if (!isfocusover) {
//                    if (usetimeoptifocus) {
//                        long currenttime = System.currentTimeMillis();
//                        Log.e("mytimelog", "lastfocustime : " + lastfocustime);
//                        Log.e("mytimelog", "currenttime : " + currenttime);
//                        long timesp = currenttime - lastfocustime;
//                        Log.e("mytimelog", "timesp : " + timesp);
//                        if (timesp > allowfocustsp) {
//                            //强行二次对焦
//                            CamAutoFocus();
//                        }
//                    } else {
//                        currentfocusfnum--;
//                        if (currentfocusfnum == 0) {
//                            //强行二次对焦
//                            CamAutoFocus();
//                        }
//                    }
//                }
//                if (isFeed && isignoreend && isfocusover) {
//                    isFeed = false;
//
//                    // 缩小BITMAP
//                    float scaleDown = 0.1f;  // 图片缩小的比例，对于身份证的ocr识别可以相对大点。
//                    Matrix matrix = new Matrix();
//                    matrix.setScale(scaleDown, scaleDown);
//                    int bitWidth = bitmap.getWidth();
//                    int bitHeight = bitmap.getHeight();
//                    Bitmap bm = Bitmap.createBitmap(bitmap, 0, 0, bitWidth, bitHeight, matrix, true);
//
//
//                    Mat grayMat = new Mat();
//                    Mat cannyEdges0 = new Mat();
//                    long time0, time1, time2, time3, time4, time5, time6, time7;
//                    time0 = System.currentTimeMillis();
//
//                    // Bitmap转为Mat
//
//                    int pictureHeight = bm.getHeight();
//                    int pictureWidth = bm.getWidth();
//
//                    Mat src = new Mat(pictureHeight, pictureWidth, CvType.CV_8UC1);
//                    Utils.bitmapToMat(bm, src);
//
//                    // 原图置灰
//                    Imgproc.cvtColor(src, grayMat, Imgproc.COLOR_BGR2GRAY);
//
//
//
//
//
////                    /**
////                     * 3、对灰度图像进行二值化处理
////                     */
////                    Mat binaryMat = new Mat(grayMat.height(),grayMat.width(),CvType.CV_8UC1);
////                    Imgproc.threshold(grayMat, binaryMat, 40, 255, Imgproc.THRESH_BINARY);
////
////                    /**
////                     * 4、图像腐蚀---腐蚀后变得更加宽,粗.便于识别--使用3*3的图片去腐蚀
////                     */
////                    Mat destMat = new Mat(); //腐蚀后的图像
////                    Mat element = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
////                    Imgproc.erode(binaryMat,destMat,element);
//
//                    Mat dst = new Mat();
//
//                    // top-hat enhance contrast
//                    Imgproc.morphologyEx(grayMat, dst, Imgproc.MORPH_TOPHAT,
//                            Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(9, 3)));
//                    Imgproc.GaussianBlur(dst, dst, new Size(13, 13), 0);
//                    Imgproc.Canny(dst, dst, 300, 600, 5, true);
//                    Imgproc.dilate(dst, dst, new Mat(), new Point(-1, -1), 5);
//                    Size heavy = new Size(35, 5);
//                    // apply a second dilate operation to the binary image
//                    Imgproc.dilate(dst, dst,
//                            Imgproc.getStructuringElement(Imgproc.MORPH_RECT, heavy));
//
//                    // enhance black area by black-hat
//                    Imgproc.morphologyEx(grayMat, dst, Imgproc.MORPH_BLACKHAT,
//                            Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(18, 10)));
//                    Imgproc.GaussianBlur(dst, dst, new Size(13, 13), 0);
//                    Imgproc.Canny(dst, dst, 300, 600, 5, true);
//                    Size heavy0 = new Size(35, 3);
//                    Imgproc.dilate(dst, dst,
//                            Imgproc.getStructuringElement(Imgproc.MORPH_RECT, heavy0));
//
////                    Bitmap processedImage = Bitmap.createBitmap(dst.cols(), dst.rows(), Bitmap.Config.ARGB_8888);
////                    Utils.matToBitmap(dst, processedImage);
////                    cropView.setImageBitmap(processedImage);
//
//
//
//
//
//
//
//                    // Canny边缘检测器检测图像边缘
//                    // Canny边缘检测器检测图像边缘
//                    // image - 表示此操作的源（输入图像）的Mat对象。
//                    // edge - 表示此操作的目的地（边缘）的Mat对象。
//                    // threshold1 - 类型为double的变量，表示迟滞过程的第一个阈值。
//                    // threshold2 - 类型为double的变量，表示迟滞过程的第二个阈值。
//                    Imgproc.Canny(grayMat, cannyEdges0, 10, 120);
//
//
//                    // Mat转Bitmap
////                    Bitmap processedImage = Bitmap.createBitmap(cannyEdges0.cols(), cannyEdges0.rows(), Bitmap.Config.ARGB_8888);
////                    Utils.matToBitmap(cannyEdges0, processedImage);
////                    cropView.setImageBitmap(processedImage);
//
//
//                    Mat hierarchy = new Mat();
//                    // 保存轮廓
//                    ArrayList<MatOfPoint> contourList = new ArrayList<>();
//
//                    // 检测轮廓
//                    //     检测轮廓
//                    //     第一个参数：image，单通道图像矩阵，可以是灰度图，但更常用的是二值图像，一般是经过Canny、拉普拉斯等边
//                    //                         缘检测算子处理过的二值图像；
//                    //    第二个参数：contours，定义为“vector<vector<Point>> contours”，是一个向量，并且是一个双重向量，向量
//                    //               内每个元素保存了一组由连续的Point点构成的点的集合的向量，每一组Point点集就是一个轮廓。
//                    //               有多少轮廓，向量contours就有多少元素。
//                    //    第三个参数：hierarchy，定义为“vector<Vec4i> hierarchy”，先来看一下Vec4i的定义：
//                    //                               typedef    Vec<int, 4>   Vec4i;
//                    //               Vec4i是Vec<int,4>的别名，定义了一个“向量内每一个元素包含了4个int型变量”的向量。
//                    //               所以从定义上看，hierarchy也是一个向量，向量内每个元素保存了一个包含4个int整型的数组。
//                    //               向量hiararchy内的元素和轮廓向量contours内的元素是一一对应的，向量的容量相同。
//                    //               hierarchy向量内每一个元素的4个int型变量——hierarchy[i][0] ~hierarchy[i][3]，分别表示第
//                    //            i个轮廓的后一个轮廓、前一个轮廓、父轮廓、内嵌轮廓的索引编号。如果当前轮廓没有对应的后一个
//                    //            轮廓、前一个轮廓、父轮廓或内嵌轮廓的话，则hierarchy[i][0] ~hierarchy[i][3]的相应位被设置为
//                    //            默认值-1。
//                    //    第四个参数：int型的mode，定义轮廓的检索模式：
//                    //               取值一： RETR_EXTERNAL，包含在外围轮廓内的内围轮廓被忽略
//                    //               取值二：RETR_LIST   检测所有的轮廓，包括内围、外围轮廓，但是检测到的轮廓不建立等级关
//                    //                      系，彼此之间独立，没有等级关系，这就意味着这个检索模式下不存在父轮廓或内嵌轮廓，
//                    //                      所以hierarchy向量内所有元素的第3、第4个分量都会被置为-1，具体下文会讲到
//                    //               取值三：RETR_CCOMP  检测所有的轮廓，但所有轮廓只建立两个等级关系，外围为顶层，若外围
//                    //                      内的内围轮廓还包含了其他的轮廓信息，则内围内的所有轮廓均归属于顶层
//                    //               取值四：RETR_TREE， 检测所有轮廓，所有轮廓建立一个等级树结构。外层轮廓包含内层轮廓，内层轮廓还可以继续包含内嵌轮廓。
//                    //    第五个参数：int型的method，定义轮廓的近似方法：
//                    //               取值一：CHAIN_APPROX_NONE 保存物体边界上所有连续的轮廓点到contours向量内
//                    //               取值二：CHAIN_APPROX_SIMPLE 仅保存轮廓的拐点信息，把所有轮廓拐点处的点保存入contours向量内，拐点与拐点之间直线段上的信息点不予保留
//                    //               取值三和四：CHAIN_APPROX_TC89_L1，CHAIN_APPROX_TC89_KCOS,使用teh-Chinl chain 近似算法
//                    //    第六个参数：Point偏移量，所有的轮廓信息相对于原始图像对应点的偏移量，相当于在每一个检测出的轮廓点上加上该偏移量，并且Point还可以是负值！
//                    // Imgproc.RETR_EXTERNAL *****;
//                    Imgproc.findContours(cannyEdges0, contourList, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
//
//                    // 画出轮廓
//                    Mat contours = new Mat();
//                    contours.create(cannyEdges0.rows(), cannyEdges0.cols(), CvType.CV_8UC3);
//                    Random r = new Random();
//                    Log.e("mylog1", "contourList.size() : " + contourList.size());
//                    for (int i = 0; i < contourList.size(); i++) {
//
//                        MatOfPoint contourListi = contourList.get(i);
////                        Log.e("mylog1", "******** " );
////                        Log.e("mylog1", "contourListi: " + contourListi.toList().size());
//                        if (contourListi.toList().size() > 50)
//                            Imgproc.drawContours(contours, contourList, i, new Scalar(r.nextInt(255), r.nextInt(255), r.nextInt(255), -1));
//                    }
//
//
//                    // Mat转Bitmap
////                    Bitmap processedImage = Bitmap.createBitmap(contours.cols(), contours.rows(), Bitmap.Config.ARGB_8888);
////                    Utils.matToBitmap(contours, processedImage);
////                    cropView.setImageBitmap(processedImage);
//
//
//
//                    Log.e("mylog1", "cannyEdges0.width : " + cannyEdges0.width());
//                    Log.e("mylog1", "cannyEdges0.height : " + cannyEdges0.height());
//                    Mat graygrayMat = new Mat();
//                    Imgproc.cvtColor(contours, graygrayMat, Imgproc.COLOR_BGR2GRAY);
////                    Mat cannyEdges = cannyEdges0;
//                    Mat cannyEdges = graygrayMat;
//                    Log.e("mylog1", "cannyEdges.width : " + cannyEdges.width());
//                    Log.e("mylog1", "cannyEdges.height : " + cannyEdges.height());
//
//
//                    time1 = System.currentTimeMillis();
//                    Mat lines = new Mat();
//
//                    // 霍夫直线检测
//                    // cv::HoughLinesP(
//                    //InputArray src, // 输入图像，必须8-bit的灰度图像
//                    //OutputArray lines, // 输出的极坐标来表示直线，经过调用HoughLinesP函数后后存储了检测到的线条的输出矢量，每一条线由具有四个元素的矢量(x_1,y_1, x_2, y_2） 表示，其中，(x_1, y_1)和(x_2, y_2) 是是每个检测到的线段的结束点。
//                    //double rho, // 生成极坐标时候的像素扫描步长，一般取值为 1
//                    //double theta, //生成极坐标时候的角度步长，一般取值CV_PI/180，即表示一度
//                    //int threshold, // 阈值，只有获得足够交点的极坐标点才被看成是直线
//                    //double minLineLength=0;// 最小直线长度，有默认值0，表示最低线段的长度，比这个设定参数短的线段就不能被显现出来。
//                    //double maxLineGap=0;// 最大间隔，有默认值0，允许将同一行点与点之间连接起来的最大的距离.)
//                    int thresholdNumb = (int) pictureWidth/5;  // 即要求直线的最小长度大于宽的 1/3;
//                    Log.e("mylog1", "pictureHeight : " + pictureHeight);
//                    Log.e("mylog1", "pictureWidth : " + pictureWidth);
//                    Log.e("mylog1", "thresholdNumb : " + thresholdNumb);
//
//                    Imgproc.HoughLinesP(cannyEdges, lines, 0.5, Math.PI/180, thresholdNumb, 50, 10);
//
////                    Imgproc.HoughLines(cannyEdges, lines, 1, Math.PI / 180, 10, 0, 0);
//                    time2 = System.currentTimeMillis();
//                    Mat houghLines = new Mat();
//                    houghLines.create(cannyEdges.rows(), cannyEdges.cols(), CvType.CV_8UC3);
//                    Log.e("mylog1", "lines.width : " + lines.width());
//                    Log.e("mylog1", "lines.height : " + lines.height());
//                    Log.e("mylog1", "lines.cols : " + lines.cols());
//                    Log.e("mylog1", "lines.rows : " + lines.rows());
//
//                    int linesNumb = 100;
//                    if (lines.rows() < linesNumb) linesNumb = lines.rows();
//                    int linesNumbY = 0;
//                    int linesNumbX = 0;
//                    for (int i = 0; i < linesNumb; i++)
//                    {
//                        double[] points = lines.get(i, 0);
//                        double x1, y1, x2, y2;
//                        double lineWidth, lineHeight;  // 边缘检测出的水平会垂直线段的长度
//                        x1 = points[0];
//                        y1 = points[1];
//                        x2 = points[2];
//                        y2 = points[3];
//
//                        Point pt1 = new Point(x1, y1);
//                        Point pt2 = new Point(x2, y2);
//                        Log.e("mylog1", "********** ");
//                        Log.e("mylog1", "pt1 : " + pt1);
//                        Log.e("mylog1", "pt2 : " + pt2);
//
//                        lineWidth = Math.abs(x1 - x2);
//                        lineHeight = Math.abs(y1 - y2);
//                        double  arctanX = Math.atan(lineWidth / (lineHeight + 0.01));
//                        double  arctanY = Math.atan(lineHeight / (lineWidth + 0.01));
//                        double arctanXToDegrees = Math.toDegrees(arctanX);
//                        double arctanYToDegrees = Math.toDegrees(arctanY);
//                        Log.e("mylog1", "arctanX : " + arctanX);
//                        Log.e("mylog1", "arctanXToDegrees: " + arctanXToDegrees);
//                        Log.e("mylog1", "arctanY : " + arctanY);
//                        Log.e("mylog1", "arctanYToDegrees: " + arctanYToDegrees);
//
//                        if(lineHeight < 48) {
//
//                            linesNumbX++;
//                            Log.e("mylog1", "linesNumbX = " + linesNumbX);
//                            Imgproc.line(houghLines, pt1, pt2, new Scalar(255, 0, 0), 1);
//                            if (linesNumbX>4) break;
//
//                        }
//
//                        if(lineWidth < 48 ) {                       // 绘制直线
//
//                            linesNumbY++;
//                            Log.e("mylog1", "linesNumbY = " + linesNumbY);
//                            Imgproc.line(houghLines, pt1, pt2, new Scalar(255, 0, 0), 1);
//                            if (linesNumbY>4) break;
//
//                        }
//
//                    }
//
//                    Log.e("mylog1", "lines.cols() =" + lines.cols());
//                    time3 = System.currentTimeMillis();
//
//                    //Mat转Bitmap
////                    Mat bitmapMat = houghLines;
////                    Bitmap processedImage = Bitmap.createBitmap(bitmapMat.cols(), bitmapMat.rows(), Bitmap.Config.ARGB_8888);
////                    Utils.matToBitmap(bitmapMat, processedImage);
////                    cropView.setImageBitmap(processedImage);
//
//
//
//
//                     //图片裁剪的核心功能
//
//                    int bitmapWidth = bitmap.getWidth();
//                    int bitmapHeight = bitmap.getHeight();
//
//                    Bitmap bitmap1 = Bitmap.createBitmap(bitmap, //原图
//                            bitmapWidth * 4/19, //图片裁剪横坐标开始位置
//                            bitmapHeight/3, //图片裁剪纵坐标开始位置
//                            bitmapWidth/2, //要裁剪的宽度
//                            bitmapHeight/3); //要裁剪的高度
//
//                    Log.e("mylog111", "bitmap.getWidth = " + bitmap.getWidth() + "; bitmap.getHeight = " + bitmap.getHeight());
////                    SaveBitmap(bitmap1);
//                    Log.e("mylog111", "bitmap1.getWidth = " + bitmap1.getWidth() + "; bitmap1.getHeight = " + bitmap1.getHeight());
////                    SaveBitmap(bitmap);
////                    cropView.setImageBitmap(bitmap1);
//
//
//
//
//                    time4 = System.currentTimeMillis();
//                    Log.e("mylog1", "cost0 = " + (time1 - time0) + "; cost1 = " + (time2 - time1));
//                    Log.e("mylog1", "cost2 = " + (time3 - time2) + "; cost3 = " + (time3 - time2));
//                    Log.e("mylog1", "cost4 = " + (time4 - time3) + "; totalCost = " + (time4 - time0));
//
//                    recognize(bitmap);
//
//                }
//            }
//        });
//        //对焦成功
//        cameraPreview.setonFocusSuccessListener(new CameraPreview.onFocusSuccess() {
//            @Override
//            public void onFocusSuccess() {
//
//                Log.e("mylog", "focusend");
//                isfocusover = true;
//            }
//        });
//
//        setScanFrameSize(428, 270);
//    }

    int NUm = 0;

    /**
     * Builds the capture screen: creates the action-liveness face detector,
     * starts the background recognize thread, wires the camera preview
     * callbacks (frame stream → recognize thread, preview-start → SDK init)
     * and kicks off the preview.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Test ID-card number, falling back to a hard-coded default.
        IDCARD = SPUtils.getInstance().getString("idcard", "421127198706140090");

        // Detector configured for action-liveness mode.
        faceDetect = FaceDetect.create(FaceDetect.CMD_ACTION_START);

        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        setContentView(R.layout.activity_intent_camera1);

        makeupBeautyUtils = new MakeupBeautyUtils();

        initProgress();
        initRecognizeThread();
        cameraPreview1 = (CameraPreview1) findViewById(R.id.camera_surface);
        tv_tip = findViewById(R.id.tv_tip);
        cropView = (ImageView) findViewById(R.id.iv_face);

        // Warm up the makeup/beauty pipeline with a bundled sample image.
        load();

        cameraPreview1.setOnClickListener(this);

        // Each NV21 preview frame is forwarded to the recognize thread.
        cameraPreview1.setOnFrameStream(new CameraPreview1.onFrameStream() {
            @Override
            public void onStream(byte[] bytes) {
                sendBytes(bytes);
            }
        });

        // TengineKit can only be initialised once the preview size is known.
        cameraPreview1.setonPreStart(new CameraPreview1.OnPreStart() {
            @Override
            public void OnStart() {
                KitCoreInit();
            }
        });

        // Focus-success callback. The original body only declared an unused
        // local (`boolean isfocusover = true;`) which had no effect — removed.
        cameraPreview1.setonFocusSuccessListener(new CameraPreview1.onFocusSuccess() {
            @Override
            public void onFocusSuccess() {
                // no-op
            }
        });

        addTip("请将脸对准正确的位置");
        cameraPreview1.startPreview();
    }

    /**
     * Loads the bundled "makeup1.jpeg" asset, publishes it as the shared
     * origin bitmap and pushes it through the makeup/beauty pipeline; the
     * processed result arrives via {@link #handler}. Any failure is logged
     * and otherwise ignored (best-effort warm-up).
     */
    private void load() {
        try {
            final Bitmap sample = BitmapUtils.getBitmapByAssetsName(this, "makeup1.jpeg");
            CommonShareBitmap.originBitmap = sample;
            makeupBeautyUtils.progress(sample, handler);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }


    /**
     * Reads the camera preview size and initialises the TengineKit SDK.
     *
     * <p>Android front-camera frames arrive rotated by 90°, so preview
     * width and height are swapped before being handed to the SDK.
     *
     * <p>Fixes over the original: the NV21 staging buffer was allocated as
     * {@code width * height} bytes, but NV21 carries 12 bits per pixel —
     * a full frame needs {@code width * height * 3 / 2} bytes, so the old
     * buffer was too small to hold one. The local preview-size variable also
     * shadowed the {@code size} field; it is renamed to make clear the field
     * is never written here.
     */
    public void KitCoreInit() {
        Camera.Size previewSize = cameraPreview1.camera.getParameters().getPreviewSize();

        // Swap width/height to compensate for the 90° sensor rotation.
        previewHeight = previewSize.width;
        previewWidth = previewSize.height;
        ScreenHeight = previewSize.width;
        ScreenWidth = previewSize.height;

        Log.e("KitCoreInit", "***************************** KitCoreInit *****************************");
        Log.e("KitCoreInit", "previewHeight = " + previewHeight + "; previewWidth = " + previewWidth);
        Log.e("KitCoreInit", "ScreenHeight = " + ScreenHeight + "; ScreenWidth = " + ScreenWidth);

        // NV21 = 12 bits/pixel => width * height * 3 / 2 bytes per frame.
        mNV21Bytes = new byte[previewWidth * previewHeight * 3 / 2];

        KitCore.init(
                this,
                AndroidConfig
                        .create()
                        .setCameraMode()
                        .setDefaultFunc()
                        .setDefaultInputImageFormat()
                        .setNormalMode()
                        .setInputImageSize(previewWidth, previewHeight)
                        .setOutputImageSize(ScreenWidth, ScreenHeight)
        );
    }


    //开启线程  子线程接受命令
    private void initRecognizeThread() {

        Log.e("mylog1", "***************************** initRecognizeThread() *****************************");

        recognizeThread = new HandlerThread("recognize-thread");
        recognizeThread.start();
        recognizeHandler = new Handler(recognizeThread.getLooper()) {
            @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
            @Override
            public void handleMessage(Message msg) {

                Log.e("mylog1", "***************************** initRecognizeThread()_handleMessage(Message msg)*****************************");

                if (msg.what == RECOGNIZE_STREAM) {

                    byte[] bytes = (byte[]) msg.obj;
                    ScanResult scanResult = new ScanResult();
                    scanResult.bytes = bytes;

                    Bitmap bitmap = cameraPreview1.nv21tool.nv21ToBitmap(bytes, ScreenWidth, ScreenHeight);
                    scanResult.bitmap = bitmap;

                    if(isFirstFrameStart) {

                        scanResult.isStart = true;
                        isFirstFrameStart=false;

                    }

                    ScanResult result = faceDetect.detect(scanResult);

                    Log.e("h2_001", "***** 前端成功识别合格普通人脸,上传图片到服务器 ***** ");
                    Log.e("h2_001", "result.isStart = " + result.isStart);
                    if(result.isStart) {
                        if(result.status==0)
                        {
                             //设置为上传中的状态
                             isUploadImageComplete=1;
                             Log.e("h2_001", "***** 前端成功识别合格普通人脸,上传图片到服务器 ***** ");

                        }
                    }

                    //将获得的消息传给主线程
                    //由主线程进行判断
                    Message mainmsg = mainHandler.obtainMessage();
                    mainmsg.what = FRONT_RECOGNIZEOVER;
                    mainmsg.obj = result;
                    mainHandler.sendMessage(mainmsg);

                }
            }
        };
    }


    /**
     * Shows {@code tip} in the tip TextView, replacing the previous text.
     * Safe to call from any thread: the update is posted to the UI thread.
     */
    private void addTip(final String tip) {
        final Runnable update = new Runnable() {
            @Override
            public void run() {
                tv_tip.setText(tip);
            }
        };
        runOnUiThread(update);
    }


    // Signals the recognize pipeline to pause by raising the isPause flag.
    // NOTE(review): this only sets a flag — it does not quit or interrupt the
    // recognize HandlerThread itself; the reader of isPause is outside this
    // chunk, so confirm the actual pause semantics there.
    private void suspendRecognizeThread(){
        isPause = true;
    }


    @Override
    protected void onResume() {
        // NOTE(review): redundant override — only calls super. The camera
        // preview and recognize thread are not resumed here; confirm whether
        // they should be restarted after a pause (preview is only stopped in
        // onDestroy in this chunk).
        super.onResume();
    }


    @Override
    protected void onPause() {
        // NOTE(review): redundant override — only calls super. The camera
        // preview keeps running while the activity is paused; on most devices
        // the camera should be released here — TODO confirm intended behavior.
        super.onPause();

    }


    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Tear down everything this activity owns: beauty utils, camera
        // preview, the countdown timer and the recognize pipeline.
        makeupBeautyUtils.destroy();
        stopPreView();
        mTimer.cancel();
        suspendRecognizeThread();
        // FIX: actually stop the recognize HandlerThread's looper. Previously
        // only the isPause flag was raised, so the thread (and its looper)
        // kept running after the activity was destroyed — a thread leak.
        if (recognizeThread != null) {
            recognizeThread.quit();
        }
    }

    boolean timefinish=true;





    // Runs on the main (UI) thread and consumes messages posted by the
    // recognize thread and upload workers:
    //   FRONT_RECOGNIZEOVER - per-frame face detection result
    //   SCAN_RESULT / SCAN_ERROR - server upload outcome
    //   FIRSTTIMEOUT - first-frame countdown expired
    // HandlerLeak is suppressed deliberately: the handler's lifetime is tied
    // to this activity's.
    @SuppressLint("HandlerLeak")
    Handler mainHandler = new Handler() {

        @Override
        public void handleMessage(Message msg) {

            Log.e("mylog1", "***************************** handleMessage(Message msg) *****************************");

            super.handleMessage(msg);

            switch (msg.what)
            {
                // One front-end scan frame finished.
                case FRONT_RECOGNIZEOVER: {
                    ScanResult sresult = (ScanResult) msg.obj;

                    // Mirror the detected face box horizontally: the front
                    // camera preview is mirrored relative to the detector's
                    // coordinate system.
                    boxtop=sresult.boxtop;
                    boxleft=screenwidth*bmapscale-sresult.boxright;
                    boxbottom=sresult.boxbottom;
                    boxright=screenwidth*bmapscale-sresult.boxleft;

                    facePosition = false; // is the face inside the guide box?
                    Log.e("h2_0004", "facePosition 0 = " + facePosition);
                    // FIX: the original `if` had no braces and no body, so the
                    // log statement below accidentally became its body and
                    // facePosition was never set to true.
                    if (boxtop >= dboxtop && boxleft >= dboxleft && dboxbottom >= boxbottom && dboxright >= boxright) {
                        facePosition = true;
                    }
                    Log.e("h2_0004", "facePosition 1 = " + facePosition);

                    // Restart action/liveness detection when the face box has
                    // been outside the guide box for more than 5 consecutive
                    // frames.
                    if(facePosition) faceBoxOut = 0;
                    else faceBoxOut++;
                    if (faceBoxOut > 5) ReStartFrame();

                    Log.e("h2_0004", "boxtop = " + boxtop + ";  boxleft = " + boxleft + ";  boxbottom = " + boxbottom + ";  boxright = " + boxright);
                    Log.e("h2_0004", "dboxtop = " + dboxtop + ";  dboxleft = " + dboxleft + ";  dboxbottom = " + dboxbottom + ";  dboxright = " + dboxright);
                    landmarksInfos = sresult.landmarksInfos;
                    if(sresult.status == 0 && landmarksInfos.size() > 100) {
                        Log.e("k99", "now2");

                        long time0, time1;
                        time0 = System.currentTimeMillis();
                        // Beautify pipeline: slim the face, enlarge both eyes,
                        // then show and save the processed frame. (The
                        // always-true `if (1 > 0)` wrapper was removed.)
                        Bitmap bitmapp0 = sresult.bitmap;
                        SaveBitmap(bitmapp0);
                        // Landmark ranges 55..68 / 39..52 are the two cheek
                        // contours and 179 the warp center — assumed from the
                        // indices used here; TODO confirm against the landmark
                        // model's layout.
                        List<Point> leftFacePoint = new ArrayList<>(34);
                        List<Point> rightFacePoint = new ArrayList<>(34);
                        for (int i = 55; i < 69; ++i) {
                            Point pt1 = new Point((int) landmarksInfos.get(i).X, (int) landmarksInfos.get(i).Y);
                            Log.e("h2_001", "smallFaceMesh pt1 = " + pt1.toString());
                            leftFacePoint.add(pt1);
                        }

                        for (int i = 39; i < 53; ++i) {
                            Point pt2 = new Point((int) landmarksInfos.get(i).X, (int) landmarksInfos.get(i).Y);
                            rightFacePoint.add(pt2);
                        }

                        Point centerPoint = new Point((int) landmarksInfos.get(179).X,
                                (int) landmarksInfos.get(179).Y);

                        Bitmap processedImage0 = smallFaceMesh(bitmapp0, leftFacePoint, rightFacePoint, centerPoint, 4);
                        // Eye centers: left eye from landmarks 102/110, right
                        // eye from 118/126.
                        Point centerPointLeft = new Point((int) (landmarksInfos.get(102).X + landmarksInfos.get(110).X)/2,
                                (int) (landmarksInfos.get(102).Y + landmarksInfos.get(110).Y)/2);
                        // FIX: the right-eye Y coordinate used the LEFT-eye
                        // landmarks (102/110) — a copy-paste error; use
                        // 118/126 to match the right-eye X coordinate.
                        Point centerPointRight = new Point((int) (landmarksInfos.get(118).X + landmarksInfos.get(126).X)/2,
                                (int) (landmarksInfos.get(118).Y + landmarksInfos.get(126).Y)/2);
                        Bitmap processedImage1 = magnifyEye(processedImage0, centerPointLeft, 80,  4);
                        Bitmap processedImage2 = magnifyEye(processedImage1, centerPointRight, 80,  4);

                        cropView.setImageBitmap(processedImage2);
                        SaveBitmap(processedImage2);
                        Log.e("h2_001", "smallFaceMesh");
                        time1 = System.currentTimeMillis();
                        Log.e("myLog", "smallFaceMesh_cost:" + (time1 - time0));

                    }

                    if(!facePosition){
                        // Face is outside the guide box: ask the user to
                        // re-position (set directly — already on UI thread).
                        tv_tip.setText("请调整您的头像位置");

                    }

                    Log.e("h2_001", "返回状态:"+sresult.status);
                    Log.e("h2_001", "当前帧:"+(sresult.currentactionindex+1));

                    Log.e("h2_001", "sresult.currentactionindex = "+sresult.currentactionindex);
                    Log.e("h2_001", "sresult.status = "+sresult.status);
                    Log.e("h2_001", "sresult.isSucess = "+sresult.isSucess);
                    if (sresult.isSucess == 0) {
                        // Front-end intention recognition succeeded.
                        isFrontRecognizeSuccess = true;

                    }
                    else if(sresult.isStart) {
                        // First frame of a session: nothing to do. (The
                        // original set a local "isstartfail" flag here on
                        // status != 0 that was never read; the dead local was
                        // removed, the branch is kept so the else-if chain
                        // behaves identically.)
                    }

                    else if(sresult.currentactionindex==0 && sresult.status==0 && facePosition
                            && isBackRecognizeSuccess) {
                        // Start the action/intention check and pick the
                        // prompt for the requested action.
                        if (sresult.action == 2001) actionOfClass = "请向右摇头";

                        Log.e("h2_0005", "facePosition 0 = " + facePosition);
                        Log.e("h2_0005", "timefinish = " + timefinish);
                    }

                    Log.e("h2_0005", "facePosition 1 = " + facePosition);

                    break;
                }

                // Server upload finished.
                case SCAN_RESULT:  {
                    FaceDetectResult bean = (FaceDetectResult) msg.obj;
                    if (bean.matched == 1)
                        {
                        // Back-end face match succeeded.
                        isBackRecognizeSuccess = true;

                    } else{
                        // Liveness / ID-card match failed: show which part
                        // failed and restart detection. Note: this `return`
                        // intentionally skips the isUploadImageComplete
                        // update below.
                        String strreal="否";
                        String strmatch="否";

                        if(bean.real==1) {
                            strreal="是";
                        }

                        if(bean.matched==1) {
                            strmatch="是";
                        }

                        addTip("活体检测："+strreal+" 身份证匹配:"+strmatch);
                        ReStartFrame();
                        return;
                    }

                    isUploadImageComplete=2;
                    break;
                }
                // Upload failed.
                case SCAN_ERROR: {
                    isUploadImageComplete=2;
                    break;
                }


                // First-frame countdown expired.
                case FIRSTTIMEOUT: {
                    isTimeOut=true;
                    break;
                }

                default:
                    break;
            }

            // Allow the next preview frame to be fed only after this frame's
            // result has been fully consumed.
            if(msg.what==FRONT_RECOGNIZEOVER) {

                isFronthandleImage = false;
            }
        }

    };


    // 发送 bytes格式图片数据
    private void sendBytes(byte[] bytes) {

        //判断是否可以喂帧
        if(!isFronthandleImage) {
            if (recognizeThread.isAlive()) {
                //   Log.e("k7", "开始喂帧");
                isFronthandleImage = true;

                // SaveBitmap(temp);
                //如果是第一帧启动定时器
                if(isFirstFrameStart) {
                    //  Log.e("k8", "启动倒计时");
                    startTimer(new TimerTask() {
                        @Override
                        public void run() {
                            //        Log.e("k8", "倒计时超时");
                            //给主线程发送消息
                            Message msg = mainHandler.obtainMessage();
                            msg.what = FIRSTTIMEOUT;
                            msg.obj = null;
                            mainHandler.sendMessage(msg);

                        }
                    });
                }
                Message msg = recognizeHandler.obtainMessage();
                msg.what = RECOGNIZE_STREAM;
                msg.obj = bytes;
                recognizeHandler.sendMessage(msg);
            }
        }
        else {
            //    Log.e("k7", "不能喂帧");
        }

    }


    //保存文件
    private void SaveBitmap(final Bitmap savebitmap) {
        try {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    String mydir = dir + "/Ok/";
                    try {
                        if (!new File(mydir).exists()) {
                            new File(mydir).mkdir();
                        }
                        final File myfile = new File(mydir + "photo" + System.currentTimeMillis() + ".jpg");
                        myfile.createNewFile();
                        FileOutputStream outstream = new FileOutputStream(myfile);
                        savebitmap.compress(Bitmap.CompressFormat.JPEG, bmpqulity, outstream);
                        outstream.flush();
                        outstream.close();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }).start();
        } catch (Exception e) {
            e.printStackTrace();
        }

    }


    //弹出窗体
    private void popResultWindow(String idCardInfo) {
        ViewHolder viewHolder = new ViewHolder(R.layout.layout_dialog);
        DialogPlus dialog = DialogPlus.newDialog(this)


                .setOnItemClickListener(new OnItemClickListener() {
                    @Override
                    public void onItemClick(DialogPlus dialog, Object item, View view, int position) {
                        //        Log.e("k11", "onItemClick");
                    }
                })
                .setOnCancelListener(new OnCancelListener() {
                    @Override
                    public void onCancel(DialogPlus dialog) {
                        //      Log.e("k11", "setOnCancelListener");
                        //重新开始preview
                        ReStartFrame();
                        cameraPreview1.startPreview();
                    }
                })
                .setContentHolder(viewHolder)
                .setExpanded(true)  // This will enable the expand feature, (similar to android L share dialog)
                .create();

        TextView textView = (TextView) dialog.findViewById(R.id.txt);
        textView.setText(idCardInfo);
        //停止预览
        cameraPreview1.stopPreview();

        isFirstFrameStart = true;


        dialog.show();
    }



    /**
     * Builds the indeterminate, non-cancellable progress HUD shown while
     * waiting on recognition/upload work, and stores it in {@code hud}.
     */
    private void initProgress() {
        KProgressHUD progress = KProgressHUD.create(this);
        progress.setStyle(KProgressHUD.Style.SPIN_INDETERMINATE);
        progress.setLabel("Please wait");
        progress.setCancellable(false);
        hud = progress;
    }


    /**
     * Forwards a detection command to the native FaceDetect state machine.
     * The call is posted onto the recognize thread so FaceDetect is only
     * touched from that thread; if the thread is not alive the command is
     * silently dropped.
     *
     * FIX: removed the dead switch statement whose cases (CMD_FACE_RIGHT,
     * CMD_FACE_LEFT, CMD_EYE_WINK) were all empty no-ops.
     *
     * @param cmd one of the {@code FaceDetect.CMD_*} constants
     */
    private void executeCommand(final int cmd) {
        if (recognizeThread.isAlive()) {
            recognizeHandler.post(new Runnable() {
                @Override
                public void run() {
                    FaceDetect.onCommand(cmd);
                }
            });
        }
    }


//    Call call;
//    private void uploadImage(final Bitmap bitmap) {
//        isUploadImage = true;
//        recognizeCount++;
//        if(call!=null){
//            call.cancel();
//        }
//        if(uploadThread!=null&&uploadThread.isAlive()){
//            uploadThread.interrupt();
//        }
//
//        try {
//            uploadThread=  new Thread(new Runnable() {
//                @Override
//                public void run() {
//
////                    addTip("startUploadImage");
//                    try {
//                        if (!new File(dir).exists()) {
//                            new File(dir).mkdir();
//                        }
//                        final File file = new File(dir + "photo" + System.currentTimeMillis() + ".jpg");
//                        file.createNewFile();
//                        Log.e("xiong", file.getAbsolutePath());
//                        FileOutputStream out = new FileOutputStream(file);
//                        bitmap.compress(Bitmap.CompressFormat.JPEG, bmpqulity, out);
//                        out.flush();
//                        out.close();
//                        final String uploadurl = "http://192.168.17.58:8383/sfz";
//                        call = okHttpUpUtil.uploadImage1(uploadurl, file.getAbsolutePath(), IDCARD);
//                        String result =call.execute().body().string();
//                        isUploadImage = false;
//                        Log.e("xiong", "****************" + result);
//                        Gson gson = new Gson();
//                        FaceDetectResult faceBean = gson.fromJson(result, FaceDetectResult.class);
//                        Message msg = mainHandler.obtainMessage();
//                        msg.what = SCAN_RESULT;
//                        msg.obj = faceBean;
//                        mainHandler.sendMessage(msg);
//                    } catch (IOException | JSONException e) {
//                        Log.e("xiong", "-----------------++++++++" + e.getMessage());
//                        Message msg = mainHandler.obtainMessage();
//                        msg.what = SCAN_ERROR;
//                        msg.obj = e.getMessage();
//                        mainHandler.sendMessage(msg);
//                        e.printStackTrace();
//                    }
//                }
//            }) ;
//
//            uploadThread.start();
//
//        } catch (Exception e) {
//            Log.e("xiong", e.getMessage());
//            Message msg = mainHandler.obtainMessage();
//            msg.what = SCAN_ERROR;
//            msg.obj = e.getMessage();
//            mainHandler.sendMessage(msg);
//            Log.e("xiong", e.getMessage());
//            e.printStackTrace();
//        }
//    }


    // Stops the camera preview on the preview widget (called from onDestroy
    // and when the result dialog is shown).
    private void stopPreView() {
        cameraPreview1.stopPreview();
    }

    /**
     * Converts a dp value to whole pixels using the device's display density.
     *
     * @param context any context; used to read the display metrics
     * @param dpValue size in density-independent pixels
     * @return the equivalent size in physical pixels, rounded up at .5
     */
    public static int dip2px(Context context, float dpValue) {
        final float density = context.getResources().getDisplayMetrics().density;
        return (int) (dpValue * density + 0.5f);
    }


    @Override
    public void onClick(View v) {
        // Click-listener entry point; currently a no-op — the view id is read
        // but no action is dispatched on it.
        int id = v.getId();
    }


}
