/*
 * Copyright (C) 2017 Baidu, Inc. All Rights Reserved.
 */
package com.baidu.aip.face;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.YuvImage;
import android.media.FaceDetector;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;

import com.baidu.aip.config.ImageFrame;
import com.baidu.aip.face.camera.ICameraControl;

import java.io.ByteArrayOutputStream;
import java.util.ArrayList;

/**
 * Encapsulates the overall face-detection pipeline: receives preview frames
 * from an {@link ImageSource}, crops and rotates them to the on-screen
 * detection region, and runs the platform {@link FaceDetector} on a dedicated
 * background thread.
 */
public class FaceDetectManager {
    private Context mContext;
    public static final String TAG = "FaceDetectManager";

    /**
     * Callback delivering face-detection results. When no face is detected,
     * {@code faces} is null and {@code status} is
     * FaceDetector.DETECT_CODE_NO_FACE_DETECTED.
     */
    public interface OnFaceDetectListener {
        void onDetectFace(int status, FaceDetector.Face[] faces, MyImageFrame imageFrame, Rect cropRect);
    }

    public FaceDetectManager(Context context) {
        mContext = context;
    }

    /** Image source the detection frames are pulled from. */
    private ImageSource imageSource;
    /** Face-detection result listener. */
    private OnFaceDetectListener listener;
    private FaceFilter faceFilter = new FaceFilter();
    private HandlerThread processThread;
    private Handler processHandler;
    private Handler uiHandler;
    /**
     * Guards {@link #lastFrame}: it is written from the frame-callback thread
     * and consumed on {@link #processThread}. The original code synchronized on
     * the frame object itself, which NPEs once the field is nulled and does not
     * protect the unsynchronized write in onFrameAvailable.
     */
    private final Object frameLock = new Object();
    private MyImageFrame lastFrame;
    // NOTE(review): mPreviewDegree is set but never read in this class — confirm intent.
    private int mPreviewDegree = 90;
    private FaceDetector mFaceDetector;
    private FaceDetector.Face[] mFaces;

    // NOTE(review): processors added via addPreProcessor are never invoked by
    // this class — confirm whether pre-processing was meant to run in process().
    private ArrayList<FaceProcessor> preProcessors = new ArrayList<>();

    /**
     * Sets the face-detection listener; results are delivered to it after each
     * processed frame.
     *
     * @param listener the listener
     */
    public void setOnFaceDetectListener(OnFaceDetectListener listener) {
        this.listener = listener;
    }

    /**
     * Sets the source the detection frames come from.
     *
     * @param imageSource the frame source
     */
    public void setImageSource(ImageSource imageSource) {
        this.imageSource = imageSource;
    }

    /**
     * @return the current frame source
     */
    public ImageSource getImageSource() {
        return this.imageSource;
    }

    /**
     * Adds a processing callback intended to run before face detection.
     *
     * @param processor the frame processor
     */
    public void addPreProcessor(FaceProcessor processor) {
        preProcessors.add(processor);
    }

    /**
     * Sets the face-tracking callback.
     *
     * @param onTrackListener the tracking listener
     */
    public void setOnTrackListener(FaceFilter.OnTrackListener onTrackListener) {
        faceFilter.setOnTrackListener(onTrackListener);
    }

    /**
     * @return the face filter
     */
    public FaceFilter getFaceFilter() {
        return faceFilter;
    }

    /**
     * Starts the pipeline: spins up the worker thread and begins receiving
     * frames from the image source.
     */
    public void start() {
        this.imageSource.addOnFrameAvailableListener(onFrameAvailableListener);
        processThread = new HandlerThread("process");
        processThread.setPriority(Thread.MAX_PRIORITY); // was the magic number 10
        processThread.start();
        processHandler = new Handler(processThread.getLooper());
        uiHandler = new Handler();
        this.imageSource.start();
    }

    private Runnable processRunnable = new Runnable() {
        @Override
        public void run() {
            byte[] frame;
            int width;
            int height;
            // Take ownership of the pending frame under the lock, then release
            // the lock before the (slow) detection work.
            synchronized (frameLock) {
                if (lastFrame == null) {
                    return;
                }
                frame = lastFrame.getFrame();
                width = lastFrame.getWidth();
                height = lastFrame.getHeight();
                lastFrame = null;
            }
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
            process(frame, width, height);
        }
    };

    /** Stops frame delivery and shuts down the worker thread. */
    public void stop() {
        this.imageSource.stop();
        this.imageSource.removeOnFrameAvailableListener(onFrameAvailableListener);
        if (processThread != null) {
            processThread.quit();
            processThread = null;
        }
        processHandler = null; // late-arriving frames are dropped, not posted to a dead looper
    }

    public void setPreviewDegree(int degree) {
        this.mPreviewDegree = degree;
    }

    /** Detection region in screen coordinates; null until set by the caller. */
    private RectF detectedRect;

    public void setDetectedRect(RectF rect) {
        detectedRect = rect;
    }

    /** Crop window in image coordinates, reused across frames. */
    private Rect cropRect = new Rect();

    /**
     * Crops the NV21 frame to the on-screen detection region, rotates the crop
     * upright, runs {@link FaceDetector#findFaces} on it and notifies the
     * listener with the status, faces, JPEG-compressed frame and crop rect.
     *
     * NOTE(review): the crop-window math mirrors the original preview-to-image
     * coordinate mapping verbatim, including its empirical factors (1.4, 2,
     * 1.2, the 70px inset) — confirm against the preview layout before changing.
     *
     * @param frame  NV21 frame data
     * @param width  frame width in pixels
     * @param height frame height in pixels
     */
    private void process(byte[] frame, int width, int height) {
        RectF originalCoordinate = new RectF();
        CameraImageSource cam = (CameraImageSource) getImageSource(); // TODO avoid this cast
        cam.getCameraControl().getPreviewView().mapToOriginalRect(originalCoordinate);
        if (detectedRect != null) {
            originalCoordinate.set(detectedRect);
        }
        YuvImage localYuvImage = new YuvImage(frame, ImageFormat.NV21, width, height, null);
        int screenMin = Math.min(ScreenUtils.getScreenHeight(mContext), ScreenUtils.getScreenWidth(mContext));
        int screenWidth = ScreenUtils.getScreenWidth(mContext);
        int screenHeight = ScreenUtils.getScreenHeight(mContext);
        int imageMin = Math.min(localYuvImage.getHeight(), localYuvImage.getWidth());

        // Crop-window size in image pixels, scaled from the screen-space region width.
        int cropWidth = (int) ((originalCoordinate.right - originalCoordinate.left) * imageMin / screenMin);

        int middleHeight = localYuvImage.getHeight() / 2;
        if (cam.getCameraControl().getCameraFacing() == ICameraControl.CAMERA_FACING_BACK) {
            // Horizontal extent of the preview projected into image space.
            float zw = ((float) screenHeight) * height / screenWidth;
            float exWid = zw < width ? (float) ((zw - width) * 1.4) : 0;

            cropRect.top = middleHeight - cropWidth / 2;
            cropRect.bottom = middleHeight + cropWidth / 2;

            float qw = zw * originalCoordinate.top / screenHeight;
            cropRect.left = (int) (qw - exWid);
            cropRect.right = cropRect.left + cropWidth;
        } else {
            float zw = ((float) screenHeight) * height / screenWidth;

            // Front camera: widen the window slightly and anchor it from the right edge.
            cropWidth = (int) (cropWidth * 1.2);
            cropRect.top = middleHeight - cropWidth / 2;
            cropRect.bottom = middleHeight + cropWidth / 2;

            float qw = zw * originalCoordinate.top / screenHeight;
            int right = (int) (width - qw - 70);
            cropRect.right = Math.min(right, width);
            cropRect.left = cropRect.right - cropWidth;
        }

        // compressToJpeg throws if the rect is not inside the image; the math
        // above can produce negative left/top, so clamp to the image bounds.
        if (!cropRect.intersect(0, 0, localYuvImage.getWidth(), localYuvImage.getHeight())) {
            return; // crop region entirely off-image: nothing to detect
        }

        ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
        localYuvImage.compressToJpeg(cropRect, 70, jpegStream);
        byte[] jpegBytes = jpegStream.toByteArray();

        BitmapFactory.Options op = new BitmapFactory.Options();
        op.inPreferredConfig = Bitmap.Config.RGB_565; // FaceDetector requires an RGB_565 bitmap
        Bitmap bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length, op);
        if (bitmap == null) {
            return; // JPEG decode failed; skip this frame
        }
        Matrix matrix = new Matrix();
        // Rotate the crop upright; the front camera needs the opposite rotation.
        matrix.setRotate(cam.getCameraControl().getCameraFacing() == ICameraControl.CAMERA_FACING_FRONT ? -90 : 90);
        Bitmap rotated = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, false);

        mFaces = new FaceDetector.Face[5];
        mFaceDetector = new FaceDetector(rotated.getWidth(), rotated.getHeight(), 5);
        int status = mFaceDetector.findFaces(rotated, mFaces);

        if (listener != null) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            rotated.compress(Bitmap.CompressFormat.JPEG, 70, baos);
            byte[] data = baos.toByteArray();
            MyImageFrame myImageFrame = imageSource.borrowMyImageFrame();
            myImageFrame.setFrame(data);
            // NOTE(review): dimensions come from the pre-rotation bitmap while the
            // frame bytes are post-rotation; kept as-is for caller compatibility.
            myImageFrame.setWidth(bitmap.getWidth());
            myImageFrame.setHeight(bitmap.getHeight());
            listener.onDetectFace(status, mFaces, myImageFrame, cropRect);
        }
        if (rotated != bitmap) {
            bitmap.recycle(); // the intermediate un-rotated bitmap is no longer needed
        }
    }

    private OnFrameAvailableListener onFrameAvailableListener = new OnFrameAvailableListener() {
        @Override
        public void onFrameAvailable(ImageFrame frame) {
            // Intentionally unused; this manager consumes MyImageFrame only.
        }

        @Override
        public void onFrameAvailable(MyImageFrame frame) {
            synchronized (frameLock) {
                lastFrame = frame;
            }
            Handler handler = processHandler;
            if (handler != null) { // drop frames that arrive after stop()
                handler.removeCallbacks(processRunnable);
                handler.post(processRunnable);
            }
        }
    };

    /**
     * Rotates an NV21 preview frame by 90 degrees.
     *
     * Mapping: dst(i, j) = src(srcHeight - 1 - j, i), applied to the Y plane
     * and to the interleaved VU (chroma) plane that follows it.
     *
     * @param data      NV21 frame data (not modified; a copy is taken)
     * @param srcWidth  source width in pixels
     * @param srcHeight source height in pixels
     * @return a new NV21 buffer of srcHeight x srcWidth
     */
    public byte[] onPreviewFrame(final byte[] data, int srcWidth, int srcHeight) {
        // Copy the system-owned buffer and operate on the copy.
        byte[] dataCopy = new byte[data.length];
        System.arraycopy(data, 0, dataCopy, 0, data.length);
        final int dstWidth = srcHeight;
        final int dstHeight = srcWidth;
        // NV21 is 1.5 bytes/pixel: full-resolution Y plane plus half-resolution VU.
        byte[] buf = new byte[dstWidth * dstHeight * 3 / 2];

        for (int i = 0; i < dstHeight; i++) {
            for (int j = 0; j < dstWidth; j++) {
                // Y: dst(i, j) <- src(srcHeight - 1 - j, i)
                buf[i * dstWidth + j] = dataCopy[(srcHeight - 1 - j) * srcWidth + i];
                // Top-left pixel of each 2x2 block carries the shared V/U pair.
                if (i % 2 == 0 && j % 2 == 0) {
                    int srcChromaRow = (srcHeight - 1 - j) / 2 + srcHeight;
                    int dstChromaRow = i / 2 + dstHeight;
                    // BUGFIX: the source chroma column is the source pixel column i
                    // (even here), not the destination column j as before — the old
                    // index re-read the same chroma pair for every block in a row.
                    buf[dstChromaRow * dstWidth + j] = dataCopy[srcChromaRow * srcWidth + i];
                    buf[dstChromaRow * dstWidth + j + 1] = dataCopy[srcChromaRow * srcWidth + i + 1];
                }
            }
        }
        return buf;
    }
}
