package net.sunzc.takephoto;

import android.content.Context;
import android.graphics.Bitmap;
import android.text.TextUtils;
import com.hanvon.faceRec.FaceCoreHelper;
import com.hanvon.faceRec.HWFaceLib;
import com.hanvon.faceRec.OUserInfo;
import net.sunzc.hwfacetest.utils.FileUtils;
import net.sunzc.takephoto.utils.MyLog;

import java.util.Arrays;

/**
 * 人脸识别
 * 获取服务器中此用户的特征
 * 给用户拍照
 * 读取用户照片的特征
 * 比较特征值
 * 判断得分
 * Created by Administrator on 2017/1/16.
 */
/**
 * Face recognizer backed by the Hanvon face SDK.
 *
 * Flow: the server-provided feature string is written to a local binary file,
 * loaded back as bytes, and used to initialize the face engine. A photo is then
 * taken via the camera, its face features extracted, and the score compared to
 * decide whether it is the same person.
 */
public class FaceRecognizer extends HWFace {
    private static final String TAG = "FaceRecognizer";

    /** Raw bytes of the feature file loaded at construction; fed to the engine as its source buffer. May be null if no feature data was supplied or the file read failed. */
    private byte[] featureFileArray;

    /**
     * Process-wide singleton instance.
     * NOTE(review): this statically retains the Context given on first call —
     * callers should pass an application Context to avoid leaking an Activity.
     */
    private static FaceRecognizer recognizer;

    /**
     * Returns the singleton recognizer, creating it on the first call.
     * <p>
     * Synchronized so two threads racing on the first call cannot create two
     * instances (the original lazy init was unsynchronized).
     * NOTE(review): arguments passed on subsequent calls are silently ignored —
     * only the first caller's {@code featureData} is ever loaded; confirm this
     * is the intended contract.
     *
     * @param context     context used by the camera/face base class
     * @param camera      camera abstraction used to take the photo
     * @param featureData server-side feature data for the user; may be empty
     * @return the shared {@link FaceRecognizer}
     */
    public static synchronized FaceRecognizer getRecognize(Context context, ICamera camera, String featureData) {
        if (recognizer == null) {
            recognizer = new FaceRecognizer(context, camera, featureData);
        }
        return recognizer;
    }

    /**
     * Writes the server feature data to the local feature file, reads it back
     * as bytes, and initializes the face engine with it. Does nothing when
     * {@code featureData} is empty.
     */
    private FaceRecognizer(Context context, ICamera camera, String featureData) {
        super(context, camera);
        if (TextUtils.isEmpty(featureData)) return;
        FileUtils.writeNew2Binary(featureData, getFeatureFile());
        featureFileArray = FileUtils.file2Byte(getFeatureFile());
        if (featureFileArray != null) {
            FaceCoreHelper.InitFaceEngine(featureFileArray, featureFileArray.length, FaceCoreHelper.SERIAL_NUMBER);
            int[] num = new int[1];
            FaceCoreHelper.GetUserIDCount(num);
            int[] id = new int[1];
            // NOTE(review): index 1 — presumably the SDK index is 1-based; confirm
            // against the Hanvon docs, otherwise this skips the first user.
            FaceCoreHelper.GetUserIDbyIndex(1, id);
            MyLog.i(TAG, "获取到用户数:" + num[0] + "---id:" + id[0]);
        }
    }

    /**
     * Loads the feature file into a byte array.
     * <p>
     * NOTE(review): this copies a hard-coded debug file from /sdcard instead of
     * writing {@code faceFeature} itself — looks like leftover test scaffolding;
     * confirm before relying on it. The method is currently unreferenced.
     *
     * @param faceFeature feature string (only used for a length log and an empty check)
     * @return the file bytes, or null when the input is empty or the read fails
     */
    private byte[] writeFaceFeature(String faceFeature) {
        if (TextUtils.isEmpty(faceFeature)) {
            return null;
        }
        MyLog.i(TAG, "featureFile是否存在:" + getFeatureFile().exists());
        FileUtils.copyFile("/sdcard/feature.dat", getFeatureFile().getAbsolutePath());
        byte[] featureArray = FileUtils.file2Byte(getFeatureFile());
        // Fix: file2Byte is nullable (see constructor) — the old code NPE'd on
        // featureArray.length when the read failed.
        if (featureArray == null) {
            return null;
        }
        MyLog.i(TAG, "人脸特征数据arrayLength:" + featureArray.length + "字符串长度:" + faceFeature.length());
        return featureArray;
    }

    /**
     * Takes a photo, runs face detection, and on success compares the detected
     * face against the loaded feature template, notifying the registered
     * {@link RecognizeListener} with the outcome.
     */
    public void recognizeFace() {
        checkFace(new FaceCheckListener() {
            @Override
            public void onCheckSuccess(Bitmap bitmap, byte[] yuvImageData, int[] facePos) {
                FileUtils.writeFile("/sdcard/takePhotoRecImage", yuvImageData);
                // Fix: guard the cast like onCheckFailure does — the original
                // unchecked cast crashed when the listener was unset or not a
                // RecognizeListener.
                if (!(mFaceListener instanceof RecognizeListener)) {
                    return;
                }
                RecognizeListener listener = (RecognizeListener) mFaceListener;
                int[] userId = new int[1];
                if (isSamePeople(yuvImageData, mCameraSize.width, mCameraSize.height, facePos, userId)) {
                    OUserInfo userInfo = getUserInfoByID(userId);
                    listener.onRecognizeSuccess(FaceRecognizer.this, bitmap, userInfo);
                } else {
                    listener.onRecognizeFailure(FaceRecognizer.this);
                }
            }

            @Override
            public void onCheckFailure(Bitmap bitmap) {
                if (mFaceListener != null && mFaceListener instanceof RecognizeListener)
                    ((RecognizeListener) mFaceListener).onRecognizeFailure(FaceRecognizer.this);
            }
        });
    }

    /**
     * Fetches the stored user record (photos, face positions, name, image
     * dimensions) for the matched user id from the face engine.
     *
     * @param userId single-element array holding the matched user id
     * @return a populated {@link OUserInfo}
     */
    private OUserInfo getUserInfoByID(int[] userId) {
        OUserInfo showUserInfo = new OUserInfo();
        showUserInfo.strId = userId[0];
        int[] facePos = new int[12 * 16];
        byte[] photoByteArray = new byte[mCameraSize.width * mCameraSize.height * 2 * TEMPLATE_SIZE];
        byte[] name = new byte[32];
        int[] pnIndex = new int[1];
        int[] photoNum = new int[1];
        int[] width = new int[1];
        int[] height = new int[1];
        FaceCoreHelper.GetUserInfobyID(
                showUserInfo.strId,
                photoByteArray,
                photoNum,
                width, height, facePos, name, pnIndex);
        showUserInfo.pbImageArray = photoByteArray;
        showUserInfo.pFacePos = facePos;
        showUserInfo.nImgNum = photoNum[0];
        showUserInfo.strName = FileUtils.getStringFromBytes(name);
        showUserInfo.nWidth = width[0];
        showUserInfo.nHeight = height[0];
        return showUserInfo;
    }

    /**
     * Runs the SDK's face recognition on the captured YUV frame against the
     * loaded feature buffer.
     *
     * @param yuvImageData captured frame; NOTE: mutated in place (see below)
     * @param facePos      face position array from detection
     * @param userId       out-param: matched user id on success
     * @return true when the engine reports {@code HW_OK}
     */
    private boolean isSamePeople(byte[] yuvImageData, int width, int height, int[] facePos, int[] userId) {
        // Fix: featureFileArray is null when no feature data was loaded (see
        // constructor) — the original dereferenced it unconditionally and NPE'd.
        if (featureFileArray == null) {
            MyLog.i(TAG, "featureFileArray == null");
            return false;
        }
        int[] pnScore = new int[1];
        int[] pnFtrPos = new int[1];
        // NOTE(review): this zeroes roughly the 20%–40% span of the frame before
        // recognition — it mutates the caller's buffer and looks like leftover
        // debug/test code; confirm whether it is still needed.
        for (int i = yuvImageData.length / 5; i < yuvImageData.length * 0.4; i++) {
            yuvImageData[i] = 0;
        }
        HWFaceLib.srcAllocBuffer = featureFileArray;
        HWFaceLib.iSrcAllocSize = featureFileArray.length;
        int result = HWFaceLib.getInstance().FaceRecognition(yuvImageData,
                width, height,
                facePos,
                userId,
                pnScore,
                pnFtrPos
        );
        MyLog.i("HWFaceLib", "需要识别的图片尺寸:" + yuvImageData.length +
                "\n识别结果:" + result +
                "\n图片宽度:" + width +
                "\n图片高度:" + height +
                "\n人脸位置:" + Arrays.toString(facePos) +
                "\n得分" + pnScore[0] +
                "\n用户id:" + userId[0] +
                "\n模板位置：" + pnFtrPos[0] +
                "\n缓冲区尺寸:" + featureFileArray.length +
                "\n缓冲区内容:" + Arrays.toString(featureFileArray)
        );
        return result == FaceCoreHelper.HW_OK;
    }

    /** Callback contract for recognition outcomes. */
    public interface RecognizeListener extends IFace {
        /** Invoked when detection failed or no stored user matched. */
        void onRecognizeFailure(FaceRecognizer faceRecognizer);

        /** Invoked when a stored user matched; carries the captured bitmap and the matched user's record. */
        void onRecognizeSuccess(FaceRecognizer faceRecognizer, Bitmap bitmap, OUserInfo userInfo);
    }
}
