package com.morristaedt.mirror.modules.emotion;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.SystemClock;
import android.renderscript.Allocation;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;

import com.affectiva.android.affdex.sdk.Frame;
import com.affectiva.android.affdex.sdk.detector.Detector;
import com.affectiva.android.affdex.sdk.detector.FrameDetector;
import com.google.gson.GsonBuilder;
import com.megvii.cloud.http.CommonOperate;
import com.megvii.cloud.http.Response;
import com.morristaedt.mirror.bean.FacePlusResult;
import com.morristaedt.mirror.utils.BitmapUtils;

import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import static com.morristaedt.mirror.utils.BitmapUtils.renderScriptNV21ToRGBA888;

public class FaceModule implements Runnable {

    private static final String TAG = "FaceModule";

    // SECURITY NOTE(review): API credentials are hard-coded in source control.
    // Move them to secure build-time or remote configuration.
    private String key = "IsnHBQ6P2AS_pOnMBX57Vw0urK3jJisw";//api_key
    private String secret = "gNJPLvPQcBs8KiY5EfWqzLVgx90AaHe4";//api_secret

    private CameraHelper cameraHelper;
    private FrameDetector frameDetector;
    private SurfaceTexture surfaceTexture;
    // Runs both the camera-setup task (this) and the Face++ polling task.
    private ExecutorService excService = Executors.newCachedThreadPool();
    private Context context;
    private FaceDetectedListener mFaceListener;
    private final CommonOperate commonOperate;
    // Written by stopDetection() on the caller's thread and read by the Face++
    // worker thread: volatile so the stop request is guaranteed to be seen.
    private volatile boolean needStop = false;

    /**
     * Creates the module and configures the on-device Affectiva detector.
     *
     * @param context      context used for the detector, camera and RenderScript
     * @param faceListener receives on-device detection callbacks and Face++ responses
     */
    public FaceModule(Context context, FaceDetectedListener faceListener) {
        this.context = context;
        surfaceTexture = new SurfaceTexture(0); // a dummy texture; the preview stays hidden
        frameDetector = new FrameDetector(context, 1, Detector.FaceDetectorMode.LARGE_FACES);
        mFaceListener = faceListener;
        // Detection parameter setup: enable every metric the listener consumes.
        frameDetector.setImageListener(faceListener);
        frameDetector.setFaceListener(faceListener);
        frameDetector.setDetectAllEmotions(true);
        frameDetector.setDetectAllAppearances(true);
        frameDetector.setDetectAllEmojis(true);
        frameDetector.setDetectAllExpressions(true);
        commonOperate = new CommonOperate(key, secret, false);
    }

    /**
     * Starts face detection on a background thread.
     */
    public void startDetection() {
        if (excService.isShutdown()) {
            // stopDetection() shuts the pool down; recreate it so the module can
            // be restarted (previously this threw RejectedExecutionException).
            excService = Executors.newCachedThreadPool();
        }
        excService.execute(this);
    }

    /**
     * Stops detection: halts the Face++ polling loop, releases the camera and
     * stops the Affectiva detector. Safe to call even if startDetection() never
     * ran or failed to open the camera.
     */
    public void stopDetection() {
        needStop = true;
        Log.d(TAG, "stopping background processing of frames");
        if (cameraHelper != null) { // run() may have failed before creating it
            cameraHelper.stop(); // stops previewing
            cameraHelper.release();
        }
        if (frameDetector.isRunning()) {
            try {
                frameDetector.stop();
            } catch (Exception e) {
                // Pass the throwable itself: e.getMessage() may be null (which
                // crashes Log.e) and dropping it loses the stack trace.
                Log.e(TAG, "failed to stop frame detector", e);
            }
        }
        frameDetector.setDetectAllEmotions(false);
        frameDetector.setDetectAllExpressions(false);
        frameDetector.setDetectAllAppearances(false);
        frameDetector.setDetectAllEmojis(false);
        excService.shutdown();
    }

    /**
     * A listener for CameraHelper callbacks: feeds preview frames to the
     * on-device detector and periodically uploads one frame to the Face++ API.
     */
    private class CameraHelperListener implements CameraHelper.Listener {
        // Minimum seconds between frames handed to the on-device detector.
        private static final float TIMESTAMP_DELTA = .06f;
        // Minimum seconds between Face++ cloud requests.
        private static final float FACE_PLUS_TIMESTAMP_DELTA = 1.8f;
        // Milliseconds to sleep while the polling loop waits for its next slot.
        private static final long POLL_SLEEP_MS = 100L;
        private float lastTimestamp = -1f;
        private float face_lastTimestamp = -1f;

        // Latest NV21 preview frame and its dimensions; written on the camera
        // callback thread, read on the Face++ worker thread, hence volatile.
        private volatile byte[] frameData;
        private volatile int bm_width, bm_height;

        private boolean isRunning = false;
        // Face++ API request parameter: attributes requested in the response.
        String request_attr = "gender,age,smiling,headpose,facequality,blur,eyestatus,emotion,ethnicity,beauty,mouthstatus,eyegaze,skinstatus";


        @Override
        public void onFrameAvailable(byte[] frame, int width, int height, Frame.ROTATE rotation) {
            float timeStamp = (float) SystemClock.elapsedRealtime() / 1000f;
            if (timeStamp > (lastTimestamp + TIMESTAMP_DELTA)) {
                lastTimestamp = timeStamp;
                frameDetector.process(createFrameFromData(frame, width, height, rotation), timeStamp);
            }
            // Launch the Face++ polling task once, on the first frame.
            if (!isRunning) {
                isRunning = true;
                needStop = false;
                excService.execute(facePlusTask);
            }
        }

        @Override
        public void onFrameSizeSelected(int width, int height, Frame.ROTATE rotation) {
        }

        // Wraps the raw NV21 data for the Affectiva detector and keeps a
        // reference to the latest frame for the Face++ task.
        private Frame createFrameFromData(byte[] frameData, int width, int height, Frame.ROTATE rotation) {
            Frame.ByteArrayFrame frame = new Frame.ByteArrayFrame(frameData, width, height, Frame.COLOR_FORMAT.YUV_NV21);
            this.frameData = frameData;
            bm_width = width;
            bm_height = height;
            frame.setTargetRotation(rotation);
            return frame;
        }

        /**
         * Face++ API polling task: every FACE_PLUS_TIMESTAMP_DELTA seconds it
         * converts the latest preview frame to a bitmap, uploads it, and
         * forwards the parsed result to the listener.
         */
        private Runnable facePlusTask = new Runnable() {
            @Override
            public void run() {
                while (!needStop) {
                    float timeStamp = (float) SystemClock.elapsedRealtime() / 1000f;
                    if (timeStamp < (face_lastTimestamp + FACE_PLUS_TIMESTAMP_DELTA)) {
                        // Not due yet: sleep instead of busy-spinning (the bare
                        // `continue` here used to peg a CPU core).
                        if (!sleepQuietly(POLL_SLEEP_MS)) {
                            return;
                        }
                        continue;
                    }
                    face_lastTimestamp = timeStamp;
                    // Snapshot the volatile fields so one consistent frame is used.
                    byte[] data = frameData;
                    int width = bm_width;
                    int height = bm_height;
                    if (data == null) {
                        // No frame captured yet; wait for the camera callback.
                        if (!sleepQuietly(POLL_SLEEP_MS)) {
                            return;
                        }
                        continue;
                    }
                    // NV21 -> RGBA bitmap via RenderScript. createBitmap never
                    // returns null, so no post-hoc null check is needed.
                    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
                    Allocation bmData = renderScriptNV21ToRGBA888(context, width, height, data);
                    bmData.copyTo(bitmap);
                    bmData.destroy(); // release native RenderScript memory (was leaked)
                    try {
                        Response response = commonOperate.detectByte(BitmapUtils.getBitmapData(bitmap), 0, request_attr);

                        // Face++ returns UTF-8 JSON; don't rely on the platform charset.
                        String res = new String(response.getContent(), StandardCharsets.UTF_8);
                        FacePlusResult result = new GsonBuilder().create().fromJson(res, FacePlusResult.class);
                        if (mFaceListener != null) {
                            mFaceListener.onFacePlusResponse(result);
                        }
                    } catch (Exception e) {
                        // Network/parse failures are non-fatal; log and keep polling.
                        Log.e(TAG, "Face++ request failed", e);
                    } finally {
                        bitmap.recycle(); // free pixel memory promptly each cycle
                    }
                }
            }

            /**
             * Sleeps for {@code millis}; returns false when interrupted so the
             * caller can exit (the interrupt status is restored).
             */
            private boolean sleepQuietly(long millis) {
                try {
                    Thread.sleep(millis);
                    return true;
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return false;
                }
            }
        };
    }

    /**
     * Callback interface combining the Affectiva image/face listeners with the
     * Face++ cloud result callback.
     */
    public interface FaceDetectedListener extends FrameDetector.ImageListener, FrameDetector.FaceListener {
        void onFacePlusResponse(FacePlusResult facePlus);
    }

    /**
     * Background entry point: opens the front camera and starts the Affectiva
     * detector, previewing into a hidden dummy SurfaceTexture.
     */
    @Override
    public void run() {
        Log.d(TAG, "starting background processing of frames");
        try {
            Display display = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
            cameraHelper = new CameraHelper(context, display, new CameraHelperListener());
            frameDetector.start();
            //noinspection deprecation
            cameraHelper.acquire(Camera.CameraInfo.CAMERA_FACING_FRONT);
            cameraHelper.start(surfaceTexture); // initiates previewing; the preview surface stays hidden
        } catch (Exception e) {
            // Camera acquisition can throw RuntimeException as well as
            // IllegalStateException; log with the stack trace either way.
            Log.e(TAG, "couldn't open camera: " + e.getMessage(), e);
        }
    }


}
