package org.opencv.samples.facedetect;

import android.app.Activity;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.Camera;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.ImageView;


import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.opencv.samples.tutorial1.R;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.List;
import java.util.logging.Logger;


/**
 * Camera preview view that captures frames from the (legacy) Camera API,
 * runs OpenCV LBP-cascade face detection on each NV21 preview frame, and
 * renders the annotated frame into an attached {@link ImageView}.
 *
 * <p>Lifecycle: call {@link #start()} to open the camera and begin detection
 * and {@link #stop()} to stop and release it. The surface callbacks keep the
 * camera in sync with the underlying {@link SurfaceView}.
 */
public class KRCameraView extends SurfaceView implements SurfaceHolder.Callback {

    private static final String TAG = "KRCameraView";

    public static final int CHANNEL_PAY = 10;       // face-scan payment
    public static final int CHANNEL_TAKE_FOOD = 20; // face-scan food pickup

    static {
        System.loadLibrary("native-lib");
    }

    private int channel = CHANNEL_PAY;  // recognition purpose, set from XML attrs
    private SurfaceHolder mSurfaceHolder;
    private Camera camera;
    public static Mat ImageCache;       // kept public static for external callers
    private Activity parentActivity;
    private boolean open;               // true while capture/recognition is active

    private Mat mMatChanged;            // reusable grayscale working Mat
    private Bitmap mBitmapChanged;      // reusable bitmap for rendering, sized to the preview
    private SurfaceHolder mSurfaceHodlerChanged;

    private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);

    /**
     * One-shot preview callback: lazily extracts and loads the LBP cascade,
     * converts each NV21 preview frame to grayscale, runs face detection,
     * draws rectangles around detections, pushes the result to the attached
     * ImageView, then re-arms itself for the next frame.
     */
    private Camera.PreviewCallback GetPreviewCallBackInstance = new Camera.PreviewCallback() {
        CascadeClassifier fier = null;

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (camera == null || !open)
                return;
            if (fier == null) {
                fier = new CascadeClassifier();
                // CascadeClassifier needs a real file path, so copy the bundled
                // raw resource into app-private storage once.
                File cascadeDir = KRCameraView.this.getContext().getDir("cascade", Context.MODE_PRIVATE);
                File cascadeFile = new File(cascadeDir, "lbpcascade_frontalfacev2.xml");
                InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
                try {
                    if (!cascadeFile.exists()) {
                        FileOutputStream os = new FileOutputStream(cascadeFile);
                        try {
                            byte[] buffer = new byte[4096];
                            int bytesRead;
                            while ((bytesRead = is.read(buffer)) != -1) {
                                os.write(buffer, 0, bytesRead);
                            }
                        } finally {
                            os.close();
                        }
                    }
                } catch (Exception ex) {
                    Log.e(TAG, "failed to extract cascade file", ex);
                } finally {
                    // Close the resource stream on every path (it was leaked
                    // before when the cascade file already existed).
                    try {
                        is.close();
                    } catch (Exception ignored) {
                        // best-effort close; nothing useful to do here
                    }
                }
                if (!fier.load(cascadeFile.getAbsolutePath())) {
                    Log.e(TAG, "failed to load cascade from " + cascadeFile.getAbsolutePath());
                }
                mMatChanged = new Mat();
            }

            Camera.Parameters param = camera.getParameters();
            // Preview frames are dimensioned by the preview size — NOT the
            // picture size, which the previous code used and which corrupts
            // the NV21 -> Mat conversion whenever the two differ.
            Camera.Size previewSize = param.getPreviewSize();
            int format = param.getPreviewFormat();
            if (format == ImageFormat.NV21) {
                Mat matBase = convertYuv2Mat(data, previewSize.width, previewSize.height);

                // convertYuv2Mat produces a 4-channel RGB(A) Mat, so use the
                // RGBA conversion code (BGR2GRAY swapped the R/B weights).
                Imgproc.cvtColor(matBase, mMatChanged, Imgproc.COLOR_RGBA2GRAY);

                // Detect faces in the grayscale frame.
                MatOfRect faces = new MatOfRect();
                int absoluteFaceSize = 0; // 0 = no minimum face size

                fier.detectMultiScale(
                        mMatChanged
                        , faces
                        , 1.1
                        , 2
                        , 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        , new Size(absoluteFaceSize, absoluteFaceSize)
                        , new Size());

                // Draw a rectangle around every detected face.
                org.opencv.core.Rect[] facesArray = faces.toArray();
                Scalar faceRectColor = new Scalar(255, 255, 255, 255);
                for (org.opencv.core.Rect face : facesArray) {
                    Imgproc.rectangle(mMatChanged, face.tl(), face.br(), faceRectColor, 3);
                }

                // Render the annotated frame.
                Utils.matToBitmap(mMatChanged, mBitmapChanged);
                drawBitmap(mBitmapChanged, mSurfaceHodlerChanged);
            }
            // A one-shot callback deregisters after each frame; re-arm it here.
            // (The old setPreviewCallback(null)/setPreviewCallback(this) pair
            // double-registered the callback alongside the one-shot re-arm.)
            camera.setOneShotPreviewCallback(GetPreviewCallBackInstance);
        }

        // Pushes the bitmap into the attached ImageView. The holder parameter
        // is retained for signature compatibility but is currently unused.
        private void drawBitmap(Bitmap target, SurfaceHolder holder) {
            if (mIvImage != null) {
                mIvImage.setImageBitmap(target);
            }
        }
    };

    /**
     * Converts an NV21 (YUV420sp) camera frame into a 4-channel RGB(A) Mat.
     *
     * @param data   raw NV21 bytes from the camera preview
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return a newly allocated CV_8UC4 Mat containing the converted frame
     */
    public static Mat convertYuv2Mat(byte[] data, int width, int height) {
        // NV21 = full-resolution Y plane followed by a half-height interleaved
        // VU plane, hence height + height / 2 rows of width bytes.
        Mat yuvMat = new Mat(height + height / 2, width, CvType.CV_8UC1);
        yuvMat.put(0, 0, data);
        Mat bmpMat = new Mat();
        Imgproc.cvtColor(yuvMat, bmpMat, Imgproc.COLOR_YUV420sp2RGB, 4);
        return bmpMat;
    }

    // Scratch buffers reused across decode() calls to avoid per-frame allocation.
    private int[] rgb;
    private Bitmap bitmap;

    /**
     * Converts a raw NV21 preview frame (as delivered to
     * {@code Camera.PreviewCallback.onPreviewFrame}) into a Bitmap, optionally
     * rotating it 90 degrees.
     *
     * @param data    raw NV21 frame bytes
     * @param width   frame width in pixels
     * @param height  frame height in pixels
     * @param degrees rotation to apply; only 90 is handled, other values leave
     *                the frame unrotated
     * @return the decoded (and possibly rotated) frame as an ARGB_8888 Bitmap;
     *         the same Bitmap instance is reused across calls
     */
    private Bitmap decode(byte[] data, int width, int height, int degrees) {
        if (rgb == null) {
            rgb = new int[width * height];
        }

        // Fixed-point YUV -> RGB conversion (ITU-R BT.601 coefficients scaled
        // by 1192 to stay in integer arithmetic).
        final int frameSize = width * height;
        for (int j = 0, yp = 0; j < height; j++) {
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & ((int) data[yp])) - 16;
                if (y < 0) y = 0;
                // NV21 interleaves V,U for every 2x2 block of luma samples.
                if ((i & 1) == 0) {
                    v = (0xff & data[uvp++]) - 128;
                    u = (0xff & data[uvp++]) - 128;
                }

                int y1192 = 1192 * y;
                int r = (y1192 + 1634 * v);
                int g = (y1192 - 833 * v - 400 * u);
                int b = (y1192 + 2066 * u);

                // Clamp to the scaled [0, 262143] range before repacking.
                if (r < 0) r = 0;
                else if (r > 262143) r = 262143;
                if (g < 0) g = 0;
                else if (g > 262143) g = 262143;
                if (b < 0) b = 0;
                else if (b > 262143) b = 262143;

                rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
            }
        }

        // Rotate 90 degrees clockwise by transposing into a fresh buffer.
        if (degrees == 90) {
            int[] rotatedData = new int[rgb.length];
            for (int y = 0; y < height; y++) {
                for (int x = 0; x < width; x++) {
                    rotatedData[x * height + height - y - 1] = rgb[x + y * width];
                }
            }
            int tmp = width;
            width = height;
            height = tmp;
            rgb = rotatedData;
        }

        if (bitmap == null) {
            bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        }

        bitmap.setPixels(rgb, 0, width, 0, 0, width, height);
        return bitmap;
    }

    private boolean canInitCamera;  // true once the surface exists
    private ImageView mIvImage;     // target for rendered frames, see setImageView()

    public KRCameraView(Context context) {
        super(context);
        Init();
    }

    public KRCameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
        Init();
        // Read the recognition purpose (pay / take-food) from the XML attributes.
        TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.kCameraView, 0, 0);
        try {
            channel = a.getInt(R.styleable.kCameraView_cameraChannel, CHANNEL_PAY);
        } finally {
            a.recycle();
        }
    }

    // Common constructor setup: register for surface lifecycle callbacks.
    private void Init() {
        this.parentActivity = (Activity) this.getContext();
        mSurfaceHolder = this.getHolder();
        mSurfaceHolder.addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.e(TAG, "surface Created:" + holder);
        canInitCamera = true;
        // If start() was called before the surface existed, open the camera now.
        if (open) {
            start();
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.e(TAG, "surface Destroyed");
        canInitCamera = false;
        stop();
    }

    /**
     * Opens the default camera, configures focus/orientation, attaches the
     * preview surface, and allocates the reusable render bitmap. Failures are
     * logged and leave {@code camera} possibly null; callers must check.
     */
    private void initFace() {
        try {
            this.camera = Camera.open();
            CameraManager manager = (CameraManager) this.getContext().getSystemService(Context.CAMERA_SERVICE);
            String[] camidlist = manager.getCameraIdList();
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(camidlist[0]);
            int orientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            Camera.Parameters par = camera.getParameters();

            // The render bitmap must match the Mat produced from preview
            // frames, so size it with the preview size (the picture size can
            // differ and previously broke Utils.matToBitmap).
            Camera.Size previewSize = par.getPreviewSize();
            mBitmapChanged = Bitmap.createBitmap(previewSize.width, previewSize.height, Bitmap.Config.ARGB_8888);

            par.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
            camera.setParameters(par);
            camera.setDisplayOrientation(orientation);
            camera.setPreviewDisplay(mSurfaceHolder);
        } catch (Exception ex) {
            Log.e(TAG, "initFace failed", ex);
        }
    }

    /**
     * Picks the supported preview size that best matches the view's aspect
     * ratio (within 10%) and height, to avoid a distorted preview. Falls back
     * to the closest height when no aspect-ratio match exists.
     *
     * @param sizes supported preview sizes, may be null
     * @param w     target width in pixels
     * @param h     target height in pixels
     * @return the best match, or null when {@code sizes} is null
     */
    private Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.1;
        double targetRatio = (double) w / h;
        if (sizes == null) return null;

        Camera.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // First pass: only sizes whose aspect ratio is within tolerance.
        for (Camera.Size size : sizes) {
            double ratio = (double) size.width / size.height;
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }

        // Second pass: no aspect-ratio match, so ignore that requirement.
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        return optimalSize;
    }


    /** Releases the camera, guarding against concurrent release. */
    private void releaseCamera() {
        Log.e(TAG, "releaseCamera");
        // Lock on the view, not on the camera field itself: the old code
        // synchronized on this.camera while also nulling it, which races.
        synchronized (this) {
            if (this.camera != null) {
                this.camera.release();
                this.camera = null;
            }
        }
    }


    // 0 = idle, 1 = preview running
    private int status = 0;

    /**
     * Stops capture/recognition, stops the preview if running, and releases
     * the camera. Safe to call repeatedly.
     */
    public void stop() {
        Log.e(TAG, "stopCamera:" + status);
        open = false;
        // Guard against the camera having already been released (the old code
        // dereferenced it unconditionally when status == 1).
        if (status == 1 && this.camera != null) {
            this.camera.stopPreview();
            status = 0;
        }
        releaseCamera();
    }

    /**
     * Opens the camera (once the surface is ready) and starts capturing and
     * recognizing frames. If the surface is not yet available, the pending
     * {@code open} flag makes {@link #surfaceCreated} retry automatically.
     */
    public void start() {
        Log.e(TAG, "startCapture:" + status);
        open = true;
        if (camera == null) {
            if (canInitCamera) {
                initFace();
            } else {
                // Surface not ready yet; surfaceCreated() will call start() again.
                return;
            }
        }
        if (camera == null)
            return;
        // Camera is initialized; begin the preview if not already running.
        if (status == 0) {
            try {
                camera.setOneShotPreviewCallback(GetPreviewCallBackInstance);
                camera.startPreview();
                status = 1;
            } catch (Exception e) {
                // Log instead of silently swallowing start failures.
                Log.e(TAG, "startPreview failed", e);
            }
        }
    }

    /** Sets the ImageView that annotated preview frames are rendered into. */
    public void setImageView(ImageView viewById) {
        mIvImage = viewById;
    }
}

