package com.hyq.hm.hyperlandmark.activity;

import android.Manifest;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.hardware.Camera;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.PermissionChecker;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
import android.util.TypedValue;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Toast;

import com.hyq.hm.hyperlandmark.R;
import com.hyq.hm.hyperlandmark.adapter.SampleAdapter;
import com.hyq.hm.hyperlandmark.app.AppConstance;
import com.hyq.hm.hyperlandmark.hander.Classifier;
import com.hyq.hm.hyperlandmark.hander.OverlayView;
import com.hyq.hm.hyperlandmark.hander.TensorFlowObjectDetectionAPIModel;
import com.hyq.hm.hyperlandmark.hander.env.BorderedText;
import com.hyq.hm.hyperlandmark.hander.env.ImageUtils;
import com.hyq.hm.hyperlandmark.hander.env.Logger;
import com.hyq.hm.hyperlandmark.hander.tracking.MultiBoxTracker;
import com.hyq.hm.hyperlandmark.tools.CameraOverlap;
import com.hyq.hm.hyperlandmark.tools.EGLUtils;
import com.hyq.hm.hyperlandmark.tools.GLBitmap;
import com.hyq.hm.hyperlandmark.tools.GLFrame;
import com.hyq.hm.hyperlandmark.tools.GLFramebuffer;
import com.hyq.hm.hyperlandmark.tools.GLPoints;
import com.hyq.hm.hyperlandmark.view.MySurfaceView;
import com.hyq.hm.hyperlandmark.view.ecogallery.EcoGallery;
import com.hyq.hm.hyperlandmark.view.ecogallery.EcoGalleryAdapterView;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import zeusees.tracking.Face;
import zeusees.tracking.FaceTracking;

/**
 * Main screen of the lipstick try-on demo: renders the front-camera preview
 * through OpenGL, tracks 106 face landmarks with ZeuseesFaceTracking, and
 * paints the selected lipstick color over the detected lips.
 */
public class MainActivity extends AppCompatActivity {
    // Overlay surface on which the colored lips are drawn.
    private MySurfaceView surfaceView;
    // View whose measured size defines the lip-drawing coordinate space.
    private View proView;
    // Physical screen size, cached in init() for other components to read.
    public static int screenW, screenH;
    // Background thread + handler: all GL work and face tracking run here.
    private HandlerThread mHandlerThread;
    private Handler mHandler;
    // Latest rotated NV21 preview frame, shared between camera and GL threads.
    private byte[] mNv21Data;
    private CameraOverlap cameraOverlap;
    // Guards writes/reads of mNv21Data across threads.
    private final Object lockObj = new Object();

    private SurfaceView mSurfaceView;

    // OpenGL helpers: EGL context, FBO, frame renderer, point renderer, logo bitmap.
    private EGLUtils mEglUtils;
    private GLFramebuffer mFramebuffer;
    private GLFrame mFrame;
    private GLPoints mPoints;
    private GLBitmap mBitmap;
    private int viewWidth;
    private int viewHeight;
    // Timestamp of the last processed preview frame (for frame throttling).
    private long startTime = 0;
    private String[] denied;
    // Camera and external-storage read/write permissions.
    private String[] permissions = {Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.CAMERA};
    // 106-point face landmark tracker.
    private FaceTracking mMultiTrack106 = null;
    // True when the tracker still needs its one-time initialization frame.
    private boolean mTrack106 = false;
    // Index of the currently selected lipstick color in AppConstance.colors.
    private int index = 0;
    /**
     * Coordinates of the upper/lower lip outline points in view space
     * (12 distinct points each; slot 12 closes the polygon back to slot 0).
     */
    private Point[] topPoint = new Point[13];
    private Point[] bottomPoint = new Point[13];
    private boolean isProcessingFrame = false;

    /**
     * Receives voice-command broadcasts and advances to the next lipstick color.
     */
    private BroadcastReceiver receiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            index = (index + 1) % AppConstance.colors.length;
        }
    };
    private IntentFilter intentFilter = new IntentFilter();

    /**
     * Bottom gallery component used to pick the lipstick color by swiping.
     */
    private EcoGallery gallery;
    private SampleAdapter adapter;

    /**
     * Sets up the color-picker gallery, requests runtime permissions on
     * Marshmallow+ (request code 5), and — once permissions are available —
     * initializes the camera/GL pipeline via {@link #init()}.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Bottom gallery used to pick the lipstick color.
        gallery = findViewById(R.id.gallery);
        adapter = new SampleAdapter(this);
        gallery.setAdapter(adapter);
        // Start in the middle so the user can scroll in both directions;
        // post() defers until the gallery has been laid out.
        gallery.post(new Runnable() {
            @Override
            public void run() {
                gallery.setSelection(adapter.getCount() / 2);
            }
        });
        gallery.setOnItemSelectedListener(new EcoGalleryAdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(EcoGalleryAdapterView<?> parent, View view, int position, long id) {
                index = position % AppConstance.colors.length;
            }

            @Override
            public void onNothingSelected(EcoGalleryAdapterView<?> parent) {
            }
        });

        // Runtime permissions only exist on Marshmallow and above.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            ArrayList<String> list = new ArrayList<>();
            for (String permission : permissions) {
                if (PermissionChecker.checkSelfPermission(this, permission) == PackageManager.PERMISSION_DENIED) {
                    list.add(permission);
                }
            }
            if (list.isEmpty()) {
                init();
            } else {
                // Remember which permissions we asked for; checked again in
                // onRequestPermissionsResult.
                denied = list.toArray(new String[0]);
                ActivityCompat.requestPermissions(this, denied, 5);
            }
        } else {
            init();
        }
        // Broadcasts with this action switch the lipstick color (voice control).
        intentFilter.addAction(AppConstance.action);
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Listen for the voice-command broadcast only while the activity is visible;
        // paired with unregisterReceiver in onStop.
        registerReceiver(receiver, intentFilter);
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Stop listening for voice commands when no longer visible.
        unregisterReceiver(receiver);
    }

    /**
     * Recursively copies an asset file or directory tree onto the file system.
     * A path whose {@code AssetManager.list} result is non-empty is treated as
     * a directory (created if needed, then recursed into); anything else is
     * stream-copied as a single file. Failures are logged, not thrown — the
     * copy is best-effort and retried on the next launch.
     *
     * @param context context used to access the app's assets
     * @param oldPath source path inside the assets folder
     * @param newPath absolute destination path on the file system
     */
    public void copyFilesFromAssets(Context context, String oldPath, String newPath) {
        try {
            String[] fileNames = context.getAssets().list(oldPath);
            if (fileNames != null && fileNames.length > 0) {
                // Directory: ensure the target (and any missing parents) exists.
                File dir = new File(newPath);
                if (!dir.exists() && !dir.mkdirs()) {
                    Log.d("mkdir", "can't make folder");
                }
                for (String fileName : fileNames) {
                    copyFilesFromAssets(context, oldPath + "/" + fileName,
                            newPath + "/" + fileName);
                }
            } else {
                // Single file: stream-copy. try-with-resources guarantees both
                // streams are closed even if the copy throws mid-way.
                try (InputStream is = context.getAssets().open(oldPath);
                     FileOutputStream fos = new FileOutputStream(new File(newPath))) {
                    byte[] buffer = new byte[1024];
                    int byteCount;
                    while ((byteCount = is.read(buffer)) != -1) {
                        fos.write(buffer, 0, byteCount);
                    }
                    fos.flush();
                }
            }
        } catch (Exception e) {
            // Best effort: log with the failing path and cause instead of crashing.
            Log.e("copyFilesFromAssets", "copy failed: " + oldPath, e);
        }
    }

    //初始化模型文件：将assets中的模型文件copy到手机内存中。
    // Initialize the model files: copy them from assets onto device storage so
    // FaceTracking can load them by absolute path (see init()).
    void InitModelFiles() {

        String assetPath = "ZeuseesFaceTracking";
        String sdcardPath = Environment.getExternalStorageDirectory()
                + File.separator + assetPath;
        copyFilesFromAssets(this, assetPath, sdcardPath);

    }

    /**
     * Handles the result of the runtime-permission request issued in onCreate
     * (request code 5). If every requested permission was granted, the camera
     * pipeline is initialized; otherwise the user is asked to grant them.
     */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        if (requestCode == 5) {
            boolean isDenied = false;
            // `denied` holds exactly the permissions we requested; cross-check
            // each against the results the system handed back.
            outer:
            for (String permission : denied) {
                for (int j = 0; j < permissions.length; j++) {
                    if (permissions[j].equals(permission)
                            && grantResults[j] != PackageManager.PERMISSION_GRANTED) {
                        isDenied = true;
                        break outer; // one denial is enough; stop scanning
                    }
                }
            }
            if (isDenied) {
                Toast.makeText(this, "请开启权限", Toast.LENGTH_SHORT).show();
            } else {
                init();
            }
        }
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    }

    // Scratch state for the TensorFlow hand-detection path; only referenced by
    // the commented-out code in the preview callback and by processImage().
    private int[] rgbBytes = null;
    private byte[][] yuvBytes = new byte[3][];
    private int yRowStride;
    private Runnable postInferenceCallback;
    private Runnable imageConverter;

    /**
     * One-time setup of the whole pipeline: screen metrics, landmark model,
     * camera wrapper, GL objects, the background render thread, the camera
     * preview callback (tracking + drawing), and the GL surface lifecycle.
     * Called only once permissions are granted.
     */
    private void init() {
        // Cache the physical screen size for other components to read.
        Display defaultDisplay = getWindowManager().getDefaultDisplay();
        Point point = new Point();
        defaultDisplay.getSize(point);
        screenW = point.x;
        screenH = point.y;

        proView = findViewById(R.id.proView);
        // Measured size is only valid after layout, hence the post().
        proView.post(new Runnable() {
            @Override
            public void run() {
                viewHeight = proView.getMeasuredHeight();
                viewWidth = proView.getMeasuredWidth();
            }
        });
        InitModelFiles();
        // Load the 106-point landmark model from the files copied above.
        mMultiTrack106 = new FaceTracking("/sdcard/ZeuseesFaceTracking/models");

        cameraOverlap = new CameraOverlap(this);

        mFramebuffer = new GLFramebuffer();
        mFrame = new GLFrame();
        mPoints = new GLPoints();
        mBitmap = new GLBitmap(this, R.drawable.ic_logo);
        // Single background thread that owns the EGL context, all GL calls and
        // the face-tracking updates.
        mHandlerThread = new HandlerThread("DrawFacePointsThread");
        mHandlerThread.start();
        surfaceView = findViewById(R.id.surfaceView);
        mHandler = new Handler(mHandlerThread.getLooper());
        cameraOverlap.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, final Camera camera) {
                final byte[] finalData = data;
                long endTime = System.currentTimeMillis();
                // Throttle: drop frames arriving less than 10 ms apart.
                if (endTime - startTime < 10) {
                    return;
                }
                Log.d("-----getDataTime------>", System.currentTimeMillis() + "");
                Log.d("----------->", "data.size----->" + data.length);
                startTime = endTime;
                // Rotate the NV21 frame 90° so it matches the display orientation.
                data = rotateYUV420Degree90(data, CameraOverlap.PREVIEW_WIDTH, CameraOverlap.PREVIEW_HEIGHT);
                // Hand the frame to the render thread under the shared lock.
                synchronized (lockObj) {
                    System.arraycopy(data, 0, mNv21Data, 0, data.length);
                }

                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        // EGL not ready yet (or already torn down): skip this frame.
                        if (mEglUtils == null) {
                            return;
                        }
                        // Neutral saturation/hue/lightness for the base frame.
                        mFrame.setS(1f);
                        mFrame.setH(0f);
                        mFrame.setL(0f);

                        // First frame (re)initializes the tracker, later frames update it.
                        if (mTrack106) {
                            mMultiTrack106.FaceTrackingInit(mNv21Data, CameraOverlap.PREVIEW_WIDTH, CameraOverlap.PREVIEW_HEIGHT);
                            mTrack106 = !mTrack106;
                        } else {
                            mMultiTrack106.Update(mNv21Data, CameraOverlap.PREVIEW_WIDTH, CameraOverlap.PREVIEW_HEIGHT);
                        }
                        boolean rotate270 = cameraOverlap.getOrientation() == 270;

                        List<Face> faceActions = mMultiTrack106.getTrackingInfo();
                        Log.d("人脸数---->", faceActions.size() + "");

                        /**
                         * Extract the lip landmarks from the tracked faces into
                         * topPoint / bottomPoint.
                         */
                        setPoint(rotate270, faceActions);
                        Log.d("---getPointTime---->", System.currentTimeMillis() + "");
                        int tid = 0;
                        // Render the camera frame, then present it.
                        mFrame.drawFrame(tid, mFramebuffer.drawFrameBuffer(), mFramebuffer.getMatrix());
                        mEglUtils.swap();
                        Log.d("---drawEndTime---->", System.currentTimeMillis() + "");
                        if (faceActions.size() == 0) {
                            // No face detected: clear the lip overlay.
                            surfaceView.clearCanvas();
                        } else {
                            // Paint the lips with the currently selected color index.
                            surfaceView.dodraw(topPoint, bottomPoint, viewWidth, viewHeight, index, 0);
                        }
                        Log.d("---surfaceEndTime---->", System.currentTimeMillis() + "");
//                        // Gesture (hand detection) path — currently disabled.
//                        if (isProcessingFrame) {
//                            LOGGER.w("Dropping frame!");
//                            return;
//                        }
//                        try {
//                            // Initialize the storage bitmaps once when the resolution is known.
//                            if (rgbBytes == null) {
//                                Camera.Size previewSize = camera.getParameters().getPreviewSize();
//                                previewHeight = previewSize.height;
//                                previewWidth = previewSize.width;
//                                rgbBytes = new int[previewWidth * previewHeight];
//                                onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
//                            }
//                        } catch (final Exception e) {
//                            LOGGER.e(e, "Exception!");
//                            return;
//                        }
//
//                        isProcessingFrame = true;
//                        yuvBytes[0] = finalData;
//                        yRowStride = previewWidth;
//
//                        imageConverter =
//                                new Runnable() {
//                                    @Override
//                                    public void run() {
//                                        ImageUtils.convertYUV420SPToARGB8888(finalData, previewWidth, previewHeight, rgbBytes);
//                                    }
//                                };
//
//                        postInferenceCallback =
//                                new Runnable() {
//                                    @Override
//                                    public void run() {
//                                        camera.addCallbackBuffer(finalData);
//                                        isProcessingFrame = false;
//                                    }
//                                };
//                        processImage();

                    }
                });
            }
        });
        mSurfaceView = findViewById(R.id.surface_view);
        mSurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder holder) {

            }

            @Override
            public void surfaceChanged(final SurfaceHolder holder, int format, final int width, final int height) {
                Log.d("=============", "surfaceChanged");
                // (Re)build the EGL context and all GL objects on the render thread,
                // then open the camera against the framebuffer's SurfaceTexture.
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        if (mEglUtils != null) {
                            mEglUtils.release();
                        }
                        mEglUtils = new EGLUtils();
                        mEglUtils.initEGL(holder.getSurface());

                        mFramebuffer.initFramebuffer();
                        mFrame.initFrame();
                        mFrame.setSize(width, height, CameraOverlap.PREVIEW_WIDTH, CameraOverlap.PREVIEW_HEIGHT);
//                        mFrame.setSize(width, height, CameraOverlap.PREVIEW_HEIGHT, CameraOverlap.PREVIEW_WIDTH);
                        mPoints.initPoints();
                        mBitmap.initFrame(CameraOverlap.PREVIEW_HEIGHT, CameraOverlap.PREVIEW_WIDTH);
                        cameraOverlap.openCamera(mFramebuffer.getSurfaceTexture());
                        // Frame buffer for the preview callback. NOTE(review): sized at
                        // 2*w*h, larger than NV21's 1.5*w*h — oversized but safe. Also
                        // allocated after openCamera; confirm no preview frame can be
                        // delivered before this runnable finishes, or the callback's
                        // arraycopy would hit a null mNv21Data.
                        mNv21Data = new byte[CameraOverlap.PREVIEW_WIDTH * CameraOverlap.PREVIEW_HEIGHT * 2];
                    }
                });

            }

            @Override
            public void surfaceDestroyed(SurfaceHolder holder) {
                // Tear everything down on the render thread, in reverse order.
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        cameraOverlap.release();
                        mFramebuffer.release();
                        mFrame.release();
                        mPoints.release();
                        mBitmap.release();
                        if (mEglUtils != null) {
                            mEglUtils.release();
                            mEglUtils = null;
                        }
                    }
                });

            }
        });
        // If the surface already exists (e.g. init() ran after permissions were
        // granted and the layout is up), surfaceChanged will not fire — set up
        // GL and the camera immediately.
        // NOTE(review): unlike the surfaceChanged path above, this branch never
        // allocates mNv21Data and swaps width/height in mFrame.setSize — confirm
        // whether this path is ever taken in practice.
        if (mSurfaceView.getHolder().getSurface() != null && mSurfaceView.getWidth() > 0) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    if (mEglUtils != null) {
                        mEglUtils.release();
                    }
                    mEglUtils = new EGLUtils();
                    mEglUtils.initEGL(mSurfaceView.getHolder().getSurface());
                    mFramebuffer.initFramebuffer();
                    mFrame.initFrame();
                    mFrame.setSize(mSurfaceView.getWidth(), mSurfaceView.getHeight(), CameraOverlap.PREVIEW_HEIGHT, CameraOverlap.PREVIEW_WIDTH);
                    mPoints.initPoints();
                    mBitmap.initFrame(CameraOverlap.PREVIEW_HEIGHT, CameraOverlap.PREVIEW_WIDTH);
                    cameraOverlap.openCamera(mFramebuffer.getSurfaceTexture());
                }
            });
        }
    }

    /** Landmark indices (into the 106-point model) of the upper-lip outline, in draw order. */
    private static final int[] TOP_LIP_LANDMARKS = {45, 37, 39, 38, 26, 33, 50, 42, 25, 36, 40, 61};
    /** Landmark indices of the lower-lip outline, in draw order. */
    private static final int[] BOTTOM_LIP_LANDMARKS = {45, 65, 64, 32, 30, 4, 50, 42, 2, 103, 63, 61};

    /**
     * Extracts the lip landmarks from the tracked faces into {@link #topPoint}
     * and {@link #bottomPoint} (12 distinct points each; slot 12 repeats slot 0
     * to close the polygon, both outlines starting at landmark 45).
     *
     * When several faces are present the last face in the list wins, matching
     * the original per-face overwrite behavior. The original method also filled
     * local OpenGL-coordinate arrays (via view2openglX/Y) that were never read;
     * those dead computations have been removed.
     *
     * @param rotate270   true when the camera orientation is 270°: x is used
     *                    as-is; otherwise x is mirrored across the preview width
     * @param faceActions faces reported by the tracker
     */
    private void setPoint(boolean rotate270, List<Face> faceActions) {
        for (Face r : faceActions) {
            for (int t = 0; t < TOP_LIP_LANDMARKS.length; t++) {
                topPoint[t] = lipPoint(r, TOP_LIP_LANDMARKS[t], rotate270);
            }
            // Close the upper-lip polygon: slot 12 repeats slot 0 (landmark 45).
            topPoint[12] = topPoint[0];
            for (int b = 0; b < BOTTOM_LIP_LANDMARKS.length; b++) {
                bottomPoint[b] = lipPoint(r, BOTTOM_LIP_LANDMARKS[b], rotate270);
            }
            bottomPoint[12] = bottomPoint[0];
        }
    }

    /**
     * Converts landmark {@code i} of face {@code r} to a preview-space point,
     * mirroring x across the preview width unless the camera is at 270°.
     */
    private Point lipPoint(Face r, int i, boolean rotate270) {
        int x = rotate270 ? r.landmarks[i * 2] : CameraOverlap.PREVIEW_WIDTH - r.landmarks[i * 2];
        int y = r.landmarks[i * 2 + 1];
        return new Point(x, y);
    }

    /**
     * Maps a pixel x coordinate into OpenGL normalized device coordinates:
     * the view center maps to 0, the edges to -1 and +1.
     *
     * @param x     pixel x coordinate
     * @param width view width in pixels
     * @return normalized x
     */
    private float view2openglX(int x, int width) {
        float half = width / 2.0f;
        return (x - half) / half;
    }

    /**
     * Maps a pixel y coordinate into OpenGL normalized device coordinates.
     * Screen y grows downward while GL y grows upward, so the axis is flipped:
     * the top edge maps to +1, the bottom edge to -1.
     *
     * @param y      pixel y coordinate
     * @param height view height in pixels
     * @return normalized y
     */
    private float view2openglY(int y, int height) {
        float half = height / 2.0f;
        return (half - y) / half;
    }


    /**
     * Rotates an NV21 (YUV420SP) frame 90° clockwise into a newly allocated
     * buffer. Layout: imageWidth*imageHeight luma (Y) bytes followed by the
     * interleaved chroma plane, total 3/2 * width * height bytes.
     *
     * NOTE(review): allocates a fresh buffer on every call — this runs per
     * preview frame, so consider reusing a buffer to reduce GC pressure.
     *
     * @param data        source NV21 frame
     * @param imageWidth  source width in pixels
     * @param imageHeight source height in pixels
     * @return rotated NV21 frame (dimensions swapped)
     */
    private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight) {
        byte[] yuv = new byte[imageWidth * imageHeight * 3 / 2];
        // Rotate the Y luma: read each source column bottom-up, write sequentially.
        int i = 0;
        for (int x = 0; x < imageWidth; x++) {
            for (int y = imageHeight - 1; y >= 0; y--) {
                yuv[i] = data[y * imageWidth + x];
                i++;
            }
        }
        // Rotate the U and V color components: walk the interleaved chroma plane
        // backwards from the end of the output so each 2-byte pair stays together.
        i = imageWidth * imageHeight * 3 / 2 - 1;
        for (int x = imageWidth - 1; x > 0; x = x - 2) {
            for (int y = 0; y < imageHeight / 2; y++) {
                yuv[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + x];
                i--;
                yuv[i] = data[(imageWidth * imageHeight) + (y * imageWidth) + (x - 1)];
                i--;
            }
        }
        return yuv;
    }

    // ---- TensorFlow object-detection configuration (hand detection) ----
    // Used by onPreviewSizeChosen/processImage; the calling code in the preview
    // callback is currently commented out.
    private static final float TEXT_SIZE_DIP = 10;
    private BorderedText borderedText;
    private static final int TF_OD_API_INPUT_SIZE = 300;

    // Which detector backend to run; only TF_OD_API is wired up below.
    private enum DetectorMode {
        TF_OD_API, MULTIBOX, YOLO;
    }

    // YOLO backend configuration (unused while MODE == TF_OD_API).
    private static final String YOLO_MODEL_FILE = "file:///android_asset/graph-tiny-yolo-voc.pb";
    private static final int YOLO_INPUT_SIZE = 416;
    private static final String YOLO_INPUT_NAME = "input";
    private static final String YOLO_OUTPUT_NAMES = "output";
    private static final int YOLO_BLOCK_SIZE = 32;
    private static final DetectorMode MODE = DetectorMode.TF_OD_API;
    // MultiBox backend configuration (unused while MODE == TF_OD_API).
    private static final int MB_INPUT_SIZE = 224;
    private static final int MB_IMAGE_MEAN = 128;
    private static final float MB_IMAGE_STD = 128;
    private static final String MB_INPUT_NAME = "ResizeBilinear";
    private static final String MB_OUTPUT_LOCATIONS_NAME = "output_locations/Reshape";
    private static final String MB_OUTPUT_SCORES_NAME = "output_scores/Reshape";
    private static final String MB_MODEL_FILE = "file:///android_asset/multibox_model.pb";
    private static final String MB_LOCATION_FILE =
            "file:///android_asset/multibox_location_priors.txt";

    // Hand-detection model/labels shipped in assets, loaded in onPreviewSizeChosen.
    private static final String TF_OD_API_MODEL_FILE =
            "file:///android_asset/hand.pb";
    private static final String TF_OD_API_LABELS_FILE = "file:///android_asset/hand_label.txt";
    private static final Logger LOGGER = new Logger();
    protected int previewWidth = 0;
    protected int previewHeight = 0;
    // Camera rotation relative to the screen, set in onPreviewSizeChosen.
    private Integer sensorOrientation;
    private long lastProcessingTimeMs;
    // Bitmaps and transforms between preview frame and the detector's crop.
    private Bitmap rgbFrameBitmap = null;
    private Bitmap croppedBitmap = null;
    private Bitmap cropCopyBitmap = null;
    private Matrix frameToCropTransform;
    private Matrix cropToFrameTransform;
    private static final boolean MAINTAIN_ASPECT = MODE == DetectorMode.YOLO;
    private OverlayView trackingOverlay;
    private Classifier detector;
    private MultiBoxTracker tracker;

    /**
     * Initializes the hand-detection pipeline once the camera preview size is
     * known: text renderer, multi-box tracker, the TF object-detection model,
     * and the frame&lt;-&gt;crop transform matrices.
     *
     * @param size     chosen preview size in pixels
     * @param rotation camera sensor rotation in degrees
     */
    public void onPreviewSizeChosen(final Size size, final int rotation) {
        final float textSizePx =
                TypedValue.applyDimension(
                        TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
        borderedText = new BorderedText(textSizePx);
        borderedText.setTypeface(Typeface.MONOSPACE);

        tracker = new MultiBoxTracker(this);

        int cropSize = TF_OD_API_INPUT_SIZE;

        try {
            detector = TensorFlowObjectDetectionAPIModel.create(
                    getAssets(), TF_OD_API_MODEL_FILE, TF_OD_API_LABELS_FILE, TF_OD_API_INPUT_SIZE);
            cropSize = TF_OD_API_INPUT_SIZE;
        } catch (final IOException e) {
            LOGGER.e(e, "Exception initializing classifier!");
            Toast toast =
                    Toast.makeText(
                            getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
            toast.show();
            // NOTE(review): finish() does not stop execution — the rest of this
            // method still runs with detector == null; consider returning here.
            finish();
        }

        previewWidth = size.getWidth();
        previewHeight = size.getHeight();

        // Rotation of the camera frame relative to the current screen orientation.
        sensorOrientation = rotation - getScreenOrientation();
        LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);

        LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
        rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
        croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Bitmap.Config.ARGB_8888);

        // Matrix mapping a full preview frame into the detector's square crop,
        // and its inverse for mapping detections back onto the frame.
        frameToCropTransform =
                ImageUtils.getTransformationMatrix(
                        previewWidth, previewHeight,
                        cropSize, cropSize,
                        sensorOrientation, MAINTAIN_ASPECT);

        cropToFrameTransform = new Matrix();
        frameToCropTransform.invert(cropToFrameTransform);

    }

    /**
     * Returns the current display rotation in degrees (0, 90, 180 or 270).
     */
    protected int getScreenOrientation() {
        final int rotation = getWindowManager().getDefaultDisplay().getRotation();
        if (rotation == Surface.ROTATION_90) {
            return 90;
        } else if (rotation == Surface.ROTATION_180) {
            return 180;
        } else if (rotation == Surface.ROTATION_270) {
            return 270;
        }
        // Surface.ROTATION_0 and any unexpected value map to 0 degrees.
        return 0;
    }

    // Monotonically increasing frame counter; each processed frame gets a unique id.
    private long timestamp = 0;
    // True while a background detection pass is in flight; gates new submissions.
    // Only written on the camera callback thread and the inference thread —
    // NOTE(review): not volatile; presumably relies on happens-before via Handler. Verify.
    private boolean computingDetection = false;
    // Snapshot of the Y (luminance) plane for the frame currently being detected.
    private byte[] luminanceCopy;
    // Background thread that backs the inference Handler (created in onResume).
    private HandlerThread handlerThread;
    // Minimum detection confidence to track a detection.
    private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.6f;
    private static final float MINIMUM_CONFIDENCE_MULTIBOX = 0.1f;
    private static final float MINIMUM_CONFIDENCE_YOLO = 0.25f;

    /**
     * Feeds the latest camera frame to the tracker and, when no detection pass is
     * already in flight, snapshots the frame and runs the detector on a cropped
     * copy in the background. Results above the mode-specific confidence threshold
     * are mapped back to frame coordinates and handed to the tracker.
     */
    protected void processImage() {
        ++timestamp;
        final long currTimestamp = timestamp;
        final byte[] originalLuminance = yuvBytes[0];
        tracker.onFrame(
                previewWidth,
                previewHeight,
                yRowStride,
                sensorOrientation,
                originalLuminance,
                currTimestamp);
        trackingOverlay.postInvalidate();

        // No mutex needed as this method is not reentrant.
        if (computingDetection) {
            readyForNextImage();
            return;
        }
        computingDetection = true;
        LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");

        rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);

        // (Re)allocate if the luminance buffer size changed, e.g. after a preview
        // size change; the old code only checked for null.
        if (luminanceCopy == null || luminanceCopy.length != originalLuminance.length) {
            luminanceCopy = new byte[originalLuminance.length];
        }
        System.arraycopy(originalLuminance, 0, luminanceCopy, 0, originalLuminance.length);
        readyForNextImage();

        final Canvas canvas = new Canvas(croppedBitmap);
        canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

        runInBackground(
                new Runnable() {
                    @Override
                    public void run() {
                        // Reset the in-flight flag even if recognition throws;
                        // otherwise one failure would disable detection forever.
                        try {
                            LOGGER.i("Running detection on image " + currTimestamp);
                            final long startTime = SystemClock.uptimeMillis();
                            final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
                            lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

                            cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
                            final Canvas debugCanvas = new Canvas(cropCopyBitmap);
                            final Paint paint = new Paint();
                            paint.setColor(Color.RED);
                            paint.setStyle(Paint.Style.STROKE);
                            paint.setStrokeWidth(2.0f);

                            float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
                            switch (MODE) {
                                case TF_OD_API:
                                    minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
                                    break;
                                case MULTIBOX:
                                    minimumConfidence = MINIMUM_CONFIDENCE_MULTIBOX;
                                    break;
                                case YOLO:
                                    minimumConfidence = MINIMUM_CONFIDENCE_YOLO;
                                    break;
                            }

                            final List<Classifier.Recognition> mappedRecognitions =
                                    new LinkedList<Classifier.Recognition>();

                            for (final Classifier.Recognition result : results) {
                                final RectF location = result.getLocation();

                                if (location != null && result.getConfidence() >= minimumConfidence) {
                                    Log.d("location.left------->", location.left + "");

                                    // Draw in crop coordinates for the debug bitmap...
                                    debugCanvas.drawRect(location, paint);

                                    // ...then map the box back into frame coordinates
                                    // before handing it to the tracker.
                                    cropToFrameTransform.mapRect(location);
                                    result.setLocation(location);
                                    mappedRecognitions.add(result);
                                }
                            }

                            tracker.trackResults(mappedRecognitions, luminanceCopy, currTimestamp);
                            trackingOverlay.postInvalidate();
                        } finally {
                            computingDetection = false;
                        }
                    }
                });
    }

    /**
     * Signals the camera pipeline that this frame is done and the next one may be
     * delivered, by invoking the registered post-inference callback (if any).
     */
    protected void readyForNextImage() {
        // Capture to a local so the field cannot be nulled by another thread
        // between the null check and the call.
        final Runnable callback = postInferenceCallback;
        if (callback != null) {
            callback.run();
        }
    }

    /**
     * Converts the pending YUV frame to RGB (via the imageConverter runnable,
     * which fills rgbBytes as a side effect) and returns the RGB pixel buffer.
     */
    protected int[] getRgbBytes() {
        imageConverter.run();
        return rgbBytes;
    }

    /**
     * Spins up the background inference thread and its Handler. Paired with
     * {@link #onPause()}, which tears them down.
     */
    @Override
    public synchronized void onResume() {
        LOGGER.d("onResume " + this);
        super.onResume();

        final HandlerThread thread = new HandlerThread("inference");
        thread.start();
        handlerThread = thread;
        handler = new Handler(thread.getLooper());
    }

    /**
     * Finishes the activity if it is not already finishing, then shuts down the
     * background inference thread and releases the Handler.
     */
    @Override
    public synchronized void onPause() {
        LOGGER.d("onPause " + this);

        if (!isFinishing()) {
            LOGGER.d("Requesting finish");
            finish();
        }

        // Guard against onPause without a matching onResume.
        if (handlerThread != null) {
            handlerThread.quitSafely();
            try {
                handlerThread.join();
            } catch (final InterruptedException e) {
                // Restore the interrupt status instead of swallowing it.
                Thread.currentThread().interrupt();
                LOGGER.e(e, "Exception!");
            } finally {
                // Null the fields even when join() was interrupted, so a stale
                // thread/handler is never reused.
                handlerThread = null;
                handler = null;
            }
        }

        super.onPause();
    }

    // Posts work onto the background inference thread; null while paused.
    private Handler handler;

    /**
     * Posts {@code r} to the background inference thread; silently dropped when
     * the activity is paused (handler is null).
     */
    protected synchronized void runInBackground(final Runnable r) {
        final Handler target = handler;
        if (target != null) {
            target.post(r);
        }
    }
}
