package com.hanvon.faceRec;

import android.content.Context;
import android.graphics.*;
import net.sunzc.hwfacetest.MyApp;
import net.sunzc.hwfacetest.utils.FileUtils;
import net.sunzc.hwfacetest.utils.MyLog;

import java.io.*;
import java.util.Arrays;

/**
 * @author HuangCH
 *         修改记录：
 *         1. 2012.06.27   实现保存多张截图
 *         涉及到的函数：
 *         SaveCaptureImage
 *         ReplaceCaptureImage
 *         RemoveUserbyID
 *         GetUserInfobyIndex
 *         GetUserInfobyID
 *         新增函数：
 *         DeleteFiles
 *         GetCaptureImage
 *         <p>
 *         新增一个用户，就会生成一个该用户ID的图片目录文件夹，里面截图通过序号从小到大存储，如果有附加模板的话，
 *         会将附加模板的截图保存为：plus.jpg
 *         2. 2012.06.28
 *         更新SO至V2.19版本
 *         3. 2012.07.04
 *         SO 更新至V2.0版本
 *         FaceRecognition 增加最匹配模板标示
 *         3. 2012.07.06
 *         SO 更新至V2.1版本
 *         <p>
 *         4. 2012.08.13
 *         SO 更新至V3.2版本
 *         新增函数：
 *         AddUserWithFeatures
 */
public class HWFaceLib {


    /**
     * Creates the wrapper and immediately initializes the native face engine
     * with the built-in serial number.
     */
    public HWFaceLib() {
        InitFaceEngine(FaceCoreHelper.SERIAL_NUMBER);
    }

    /**
     * Recognition succeeded.
     */
    public final static int HW_REC_SUCCESS = 1;

    /** Generic success / no error. */
    public final static int HW_OK = 0;
    /**
     * Generic failure.
     */
    public final static int HW_FAIL = -1;
    /**
     * Invalid input image.
     */
    public final static int HW_INVALID_IMG = -2;
    /**
     * Invalid input argument.
     */
    public final static int HW_INVALID_ARG = -3;
    /**
     * Out of memory.
     */
    public final static int HW_OUTOFMEMORY = -4;
    /**
     * Input argument out of range.
     */
    public final static int HW_OUTOFRANGE = -5;
    /**
     * No face detected.
     */
    public final static int HW_DETECT_FAIL = -6;
    /**
     * When adding or updating a user, the number of images in which a face
     * could be located is smaller than the required template count.
     */
    public final static int HW_IMG_NOT_ENOUGH = -7;
    /**
     * No SD card detected.
     */
    public final static int HW_SDK_FILE_NOSDCARD = -8;
    /**
     * SDK parameter directory not found.
     */
    public final static int HW_SDK_FILE_NOSDKPATH = -9;
    /**
     * SDK parameter file not found.
     */
    public final static int HW_SDK_FILE_NOSDKCFG = -10;
    /**
     * Feature-storage directory not found.
     */
    public final static int HW_SDK_FILE_FEATUREDIR = -11;
    /**
     * Invalid user-info ID.
     */
    public final static int HW_SDK_USERINFO_ERRID = -12;
    /**
     * Error reading the user's JPG image file.
     */
    public final static int HW_SDK_USERINFO_JPGERR = -13;
    /**
     * Invalid file name.
     */
    public final static int HW_SDK_FILE_NAMEERR = -14;
    /**
     * The user info does not exist.
     */
    public final static int HW_SDK_USERINFO_NOTEXIST = -15;
    /**
     * Too few templates.
     */
    public final static int HW_SDK_TEMPLATE_LESS = -16;
    /**
     * Not enough valid templates could be extracted during enrollment.
     */
    public final static int HW_SDK_ADDNEWUSER_LESSTEMPLATE = -18;
    /**
     * The template data does not match this device.
     */
    public final static int HW_NOT_VALID_SERIALNUMBER = -20;
    /**
     * Maximum number of images in an image sequence.
     */
    public final static int USER_MAX_IMG = 16;
    /**
     * Maximum user-name length, in chars.
     */
    public final static int USER_NAME_LEN = 32;

    // Historical hard-coded storage roots, kept for reference:
    //	private final static String strDataFile = "/mnt/sdcard/HanvonFeature";
//	private final static String strDataFile = "/mnt/storage/sdcard/HanvonFeature";
    // Feature data now lives under the app-private files directory.
    private final static String strDataFile2 = MyApp.getInstance().getFilesDir().getAbsolutePath();
    // Name of the feature sub-directory under strDataFile2.
    private final static String strDataFile = "HanvonFeature";

    // Cached contents of hwFeature.dat, loaded by InitFaceEngine and handed
    // to the native core.
    public static byte[] srcAllocBuffer = null;

    // Length in bytes of srcAllocBuffer.
    public static int iSrcAllocSize = 0;

    // Serial number / UUID last passed to InitFaceEngine.
    private static String strUUID = "";

    // Lazily-created singleton instance (see getInstance()).
    private static HWFaceLib mInstance;

    /**
     * Returns the process-wide singleton, creating it on first use.
     * <p>
     * Synchronized: the original unsynchronized lazy check could construct two
     * instances (and run native engine init twice) under concurrent first calls.
     */
    public static synchronized HWFaceLib getInstance() {
        if (mInstance == null) {
            mInstance = new HWFaceLib();
        }
        return mInstance;
    }

    /**
     * Resolves {@code dirname} under the app's private files directory and
     * returns its absolute path.
     *
     * @param context Android context used to locate the files directory
     * @param dirname sub-directory name to resolve
     * @return absolute path of the resolved directory
     */
    public static String getFileDir(Context context, String dirname) {
        File base = context.getFilesDir();
        File target = new File(base, dirname);
        return target.getAbsolutePath();
    }

    /** Absolute path of the "HanvonFeature" directory under the app files dir. */
    public static String getFaceFeatureDir(Context context) {
        final String featureDirName = "HanvonFeature";
        return getFileDir(context, featureDirName);
    }

    /** Absolute path of the "ImgFiles" directory inside the feature directory. */
    public static String getImageDir(Context context) {
        File imgDir = new File(getFaceFeatureDir(context), "ImgFiles");
        return imgDir.getAbsolutePath();
    }

    /** Absolute path of the feature data file "hwFeature.dat". */
    public static String getFaceFeatureFile(Context context) {
        File featureFile = new File(getFaceFeatureDir(context), "hwFeature.dat");
        return featureFile.getAbsolutePath();
    }

    /** Deletes the feature data file (best effort, delegated to FileUtils). */
    public static void clearFaceFeatureFile(Context context) {
        File featureFile = new File(getFaceFeatureFile(context));
        FileUtils.delFile(featureFile);
    }


    /***
     * Creates the feature data directory (if missing) and writes an initial
     * "hwFeature.dat" when the file is absent or {@code nLen == 0}.
     * <p>
     * The initial payload is four little-endian ints: user count (0), the
     * default detection-distance rule, the default detection-strictness rule
     * and the default recognition rule.
     *
     * @param nLen length flag; 0 forces the default file to be (re)written
     * @return {@link #HW_OK} on success, {@link #HW_FAIL} on any I/O failure
     *         (the original silently swallowed write failures and still
     *         reported success, and leaked the output stream on exception)
     */
    private int CreateInitDataFile(int nLen) {
        try {
            // mkdirs() also creates the missing parent directory, which the
            // original handled with a separate mkdir() call.
            File dir = new File(strDataFile2 + "/" + strDataFile);
            if (!dir.exists() && !dir.mkdirs()) {
                return HW_FAIL;
            }

            File srcFile = new File(dir, "hwFeature.dat");
            if (!srcFile.exists() || nLen == 0) {
                // Build the 16-byte default header.
                byte[] data = new byte[16];
                System.arraycopy(Int2Bytes(0), 0, data, 0, 4); // user count = 0
                System.arraycopy(Int2Bytes(eDetectionDistanceRule.Detect_Near.ordinal()), 0, data, 4, 4);
                System.arraycopy(Int2Bytes(eDetectionStrictRule.Detect_Common.ordinal()), 0, data, 8, 4);
                System.arraycopy(Int2Bytes(eRecogRule.Recog_Loose.ordinal()), 0, data, 12, 4);

                // FileOutputStream creates the file itself, so the explicit
                // createNewFile() of the original is unnecessary. Close in
                // finally so the stream is never leaked on a failed write.
                FileOutputStream out = new FileOutputStream(srcFile);
                try {
                    out.write(data);
                    out.flush();
                } finally {
                    out.close();
                }
            }
            return HW_OK;
        } catch (Exception ex) {
            ex.printStackTrace();
            return HW_FAIL;
        }
    }

    /**
     * Maps a face-match distance to a score by piecewise-linear interpolation
     * over a fixed distance/score table.
     *
     * @param nDist match distance reported by the engine (smaller = better match)
     * @return interpolated score; clamped to the first table score below the
     *         table range and to the last table score above it
     */
    private int ChangeDistToScore(int nDist) {
        final int[] aiScore = {100, 92, 85, 78, 70, 63, 55, 45, 35, 25};
        final int[] aiDist = {2400, 2900, 3400, 3900, 4400, 4900, 5400, 6400, 7000, 8000};
        // Derive the length and the clamp values from the tables themselves
        // (the original duplicated them as the literals 10, 100 and 25).
        final int iTableLen = aiDist.length;

        if (nDist <= aiDist[0]) {
            return aiScore[0];
        }
        if (nDist >= aiDist[iTableLen - 1]) {
            return aiScore[iTableLen - 1];
        }

        // Find the segment [aiDist[i], aiDist[i+1]) containing nDist.
        int i;
        for (i = 0; i < iTableLen - 1; i++) {
            if (nDist >= aiDist[i] && nDist < aiDist[i + 1]) {
                break;
            }
        }
        // Linear interpolation inside the segment.
        return (nDist - aiDist[i]) * (aiScore[i + 1] - aiScore[i]) / (aiDist[i + 1] - aiDist[i]) + aiScore[i];
    }


    /**
     * Fast integer approximation of the 2-D distance of the vector (x, y),
     * computed with shifts only: |x| + |y| - min/2 - min/4 + min/16.
     *
     * @param x delta in X (sign ignored)
     * @param y delta in Y (sign ignored)
     * @return approximate length of (x, y)
     */
    private int FastDistance2D(int x, int y) {
        int ax = Math.abs(x);
        int ay = Math.abs(y);
        int lesser = Math.min(ax, ay);
        return ax + ay - (lesser >> 1) - (lesser >> 2) + (lesser >> 4);
    }

    /**
     * Recursively deletes a file or directory tree (best effort).
     * <p>
     * Added 2012.6.27 to remove a user's capture-image directory. Failures are
     * deliberately ignored: deletion here is cleanup, not a correctness
     * requirement.
     *
     * @param file file or directory to delete; may be null or non-existent
     */
    private void DeleteFiles(File file) {
        try {
            if (file == null || !file.exists()) {
                return;
            }
            if (file.isDirectory()) {
                // listFiles() returns null on an I/O error; the original would
                // have thrown an NPE here (masked by the blanket catch).
                File[] children = file.listFiles();
                if (children != null) {
                    for (File child : children) {
                        DeleteFiles(child);
                    }
                }
            }
            file.delete();
        } catch (Exception ignored) {
            // Best-effort cleanup: swallow failures, matching original intent.
        }
    }

    /**
     * Loads the saved capture JPEGs for user {@code nID} into
     * {@code info.pbImageArray}, one slot of nWidth*nHeight*2 bytes per image,
     * recording each JPEG's byte length in {@code info.pFacePos[slot*12+11]}.
     * <p>
     * Numbered images "0.jpg".."(nImgNum-1).jpg" are loaded first, then the
     * optional extra-template image "plus.jpg". On return {@code info.nImgNum}
     * holds the number of images actually loaded.
     *
     * @param info user info holding destination buffers; nImgNum is both the
     *             number of images to look for and, on return, the number found
     * @param nID  user ID whose image directory is read
     */
    private void GetCaptureImage(OUserInfo info, int nID) {
        String dirPath = strDataFile2 + "/" + strDataFile + String.format("/ImgFiles/%d", nID);
        int nImageCount = 0;

        File dir = new File(dirPath);
        if (dir.exists()) { // the user's image directory exists
            for (int i = 0; i < info.nImgNum; i++) {
                if (LoadJpegInto(info, dirPath + String.format("/%d.jpg", i), nImageCount)) {
                    nImageCount++;
                }
            }
            // Optional extra-template capture.
            if (LoadJpegInto(info, dirPath + "/plus.jpg", nImageCount)) {
                nImageCount++;
            }
        }

        info.nImgNum = nImageCount;
    }

    /**
     * Reads one JPEG file fully into slot {@code slot} of info.pbImageArray and
     * stores its byte length in info.pFacePos[slot * 12 + 11].
     * <p>
     * Fixes two issues of the original inline code: the stream was leaked when
     * read/copy threw, and a single read() call could return fewer bytes than
     * available() reported.
     *
     * @return true if the file existed and was read successfully
     */
    private boolean LoadJpegInto(OUserInfo info, String path, int slot) {
        File file = new File(path);
        if (!file.exists()) {
            return false;
        }
        FileInputStream in = null;
        try {
            in = new FileInputStream(file);
            int len = (int) file.length();
            byte[] buf = new byte[len];
            // read() may deliver fewer bytes than requested; loop until done.
            int off = 0;
            while (off < len) {
                int n = in.read(buf, off, len - off);
                if (n < 0) {
                    break;
                }
                off += n;
            }
            System.arraycopy(buf, 0, info.pbImageArray, slot * info.nWidth * info.nHeight * 2, off);
            info.pFacePos[slot * 12 + 11] = off;
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // nothing useful to do on close failure
                }
            }
        }
    }

    /**
     * Computes the real-world (X, Y, Z) coordinates of both eyes, in cm, from
     * their pixel coordinates.
     * <p>
     * Pixel coordinates are first rescaled to a 1280x720 reference frame. If
     * the measured eye distance falls inside one of the calibrated segments, a
     * non-linear fit is used for X/Y/Z; otherwise a linear fit on viewing
     * distance plus per-cm pixel-density coefficients is used as fallback.
     *
     * @param nLeftEyeX           left-eye pixel X (image origin at top-left)
     * @param nLeftEyeY           left-eye pixel Y
     * @param nRightEyeX          right-eye pixel X
     * @param nRightEyeY          right-eye pixel Y
     * @param nAvgEyeDist         average adult eye distance
     * @param nFunSegMent         number of function segments
     * @param pnEyeDistThresh     eye-distance thresholds delimiting the segments
     * @param pnNonLinearFunCoef  coefficients of the non-linear fit
     * @param pnLinearFunCoef     coefficients of the linear fit
     * @param pnPixelPerCMXCoef   linear coefficients: pixels-per-cm in X vs. distance
     * @param pnPixelPerCMYCoef   linear coefficients: pixels-per-cm in Y vs. distance
     * @param pnEyeRealPos        output: real-world left/right eye positions, in cm
     * @param nEyeRealPosOffset   start offset into pnEyeRealPos
     * @param nImgWidth           source image width
     * @param nImgHeight          source image height
     * @param nCameraHeight       height at which the camera is mounted
     */
    private void GetEyeRealPos(int nLeftEyeX, int nLeftEyeY, int nRightEyeX, int nRightEyeY, int nAvgEyeDist,
                               int nFunSegMent, int[] pnEyeDistThresh, long[] pnNonLinearFunCoef, long[] pnLinearFunCoef,
                               int[] pnPixelPerCMXCoef, int[] pnPixelPerCMYCoef, int[] pnEyeRealPos, int nEyeRealPosOffset,
                               int nImgWidth, int nImgHeight, int nCameraHeight) {
        int nEyeDist;
        int i = 0, j = 0;
        int nFunCoefLength = 22;  // coefficients per segment in pnNonLinearFunCoef
        int nEyeXFunLength = 9;   // X-fit coefficients per segment
        int nEyeYFunLength = 9;   // Y-fit coefficients per segment
        int nEyeZFunLength = 4;   // Z-fit coefficients per segment
        int nImgCenterPixelX = nImgWidth / 2;
        int nImgCenterPixelY = nImgHeight / 2;
        int[] pLResEyeRealPos = new int[5];
        int dx, dy;

        // Rescale the pixel coordinates to the calibrated 1280x720 frame
        // (original comment marked this span "640_480").

        nLeftEyeX = nLeftEyeX * 1280 / nImgWidth;
        nLeftEyeY = nLeftEyeY * 720 / nImgHeight;
        nRightEyeX = nRightEyeX * 1280 / nImgWidth;
        nRightEyeY = nRightEyeY * 720 / nImgHeight;

        nImgCenterPixelX = 640;
        nImgCenterPixelY = 360;

        nEyeDist = FastDistance2D((nLeftEyeX - nRightEyeX), (nLeftEyeY - nRightEyeY));
        for (i = 0; i < 5; i++)
            pLResEyeRealPos[i] = 0;

        // Determine which calibration segment the eye distance belongs to.
        for (i = 0; i < nFunSegMent; i++) {
            if (nEyeDist <= pnEyeDistThresh[i])
                break;
        }

        if (i != nFunSegMent && nEyeDist != 0) {
            // Eye X/Y/Z estimated with the segment's non-linear fit.
            pLResEyeRealPos[0] = GetLeftEyeXRealPos(nEyeDist, nLeftEyeX, pnNonLinearFunCoef, i * nFunCoefLength);
            pLResEyeRealPos[1] = GetLeftEyeYRealPos(nEyeDist, nLeftEyeY, pnNonLinearFunCoef, i * nFunCoefLength + nEyeXFunLength);
            pLResEyeRealPos[2] = GetRightEyeXRealPos(nLeftEyeX, nRightEyeX, nEyeDist, nAvgEyeDist, pLResEyeRealPos[0]);
            pLResEyeRealPos[3] = GetRightEyeYRealPos(nLeftEyeY, nRightEyeY, nEyeDist, nAvgEyeDist, pLResEyeRealPos[1]);
            pLResEyeRealPos[4] = GetEyeZRealPos(nEyeDist, pnNonLinearFunCoef, i * nFunCoefLength + nEyeXFunLength + nEyeYFunLength);

            // Convert real positions to centimetre units.
            for (j = 0; j < 5; j++) {
                pnEyeRealPos[j + nEyeRealPosOffset] = (pLResEyeRealPos[j]) / 10;
            }
        } else {
            // Fallback: X/Y estimated with the linear fit on viewing distance.
            pLResEyeRealPos[4] = GetEyeZRealPosOri(nEyeDist, pnLinearFunCoef, 0);
            pnEyeRealPos[4 + nEyeRealPosOffset] = (pLResEyeRealPos[4] >> 17);
            dx = pnPixelPerCMXCoef[0] * pnEyeRealPos[4 + nEyeRealPosOffset] + pnPixelPerCMXCoef[1];
            dy = pnPixelPerCMYCoef[0] * pnEyeRealPos[4 + nEyeRealPosOffset] + pnPixelPerCMYCoef[1];
            pLResEyeRealPos[0] = (nImgCenterPixelX - nLeftEyeX) * dx;
            pLResEyeRealPos[1] = (nImgCenterPixelY - nLeftEyeY) * dy;
            pLResEyeRealPos[2] = (nImgCenterPixelX - nRightEyeX) * dx;
            pLResEyeRealPos[3] = (nImgCenterPixelY - nRightEyeY) * dy;
            for (i = 0; i < 4; i++)
                pnEyeRealPos[i + nEyeRealPosOffset] = (pLResEyeRealPos[i] >> 17);
            pnEyeRealPos[1 + nEyeRealPosOffset] += nCameraHeight;
            pnEyeRealPos[3 + nEyeRealPosOffset] += nCameraHeight;
        }
    }

    /**
     * Non-linear estimate of the left eye's real-world X coordinate.
     * Coefficients are pre-shifted (&gt;&gt; 4) to keep the fixed-point products
     * within long range; the final result is scaled back with &gt;&gt; 16.
     *
     * @param nEyeDist          pixel distance between the eyes
     * @param nLeftEyeX         left-eye X in pixel coordinates
     * @param pnLeftEyeXFunCoef fitted-function coefficients
     * @param nOffset           offset of the X coefficients within the array
     * @return left-eye X in real-world coordinates
     */
    private int GetLeftEyeXRealPos(int nEyeDist, int nLeftEyeX, long[] pnLeftEyeXFunCoef, int nOffset) {
        long x = nLeftEyeX;
        long d = nEyeDist;
        long d2 = d * d;
        long d3 = d2 * d;

        // Terms independent of the eye distance.
        long acc = (pnLeftEyeXFunCoef[7 + nOffset] >> 4) * x
                + ((pnLeftEyeXFunCoef[6 + nOffset] * x * x) >> 4);

        // Cubic polynomial in the eye distance: d^3, d^2, d terms.
        long[] distPowers = {d3, d2, d};
        for (int k = 0; k < 3; k++) {
            acc += distPowers[k] * (((pnLeftEyeXFunCoef[k + nOffset] * x) >> 4)
                    + (pnLeftEyeXFunCoef[k + 3 + nOffset] >> 4));
        }
        acc += (pnLeftEyeXFunCoef[8 + nOffset] >> 4);
        return (int) (acc >> 16);
    }

    /**
     * Non-linear estimate of the left eye's real-world Y coordinate.
     * Coefficients are pre-shifted (&gt;&gt; 3) to keep the fixed-point products
     * within long range; the final result is scaled back with &gt;&gt; 17.
     *
     * @param nEyeDist          pixel distance between the eyes
     * @param nLeftEyeY         left-eye Y in pixel coordinates
     * @param pnLeftEyeYFunCoef fitted-function coefficients
     * @param nOffset           offset of the Y coefficients within the array
     * @return left-eye Y in real-world coordinates
     */
    private int GetLeftEyeYRealPos(int nEyeDist, int nLeftEyeY, long[] pnLeftEyeYFunCoef, int nOffset) {
        long y = nLeftEyeY;
        long d = nEyeDist;
        long d2 = d * d;
        long d3 = d2 * d;

        // Terms independent of the eye distance.
        long acc = (pnLeftEyeYFunCoef[7 + nOffset] >> 3) * y
                + ((pnLeftEyeYFunCoef[6 + nOffset] * y * y) >> 3);

        // Cubic polynomial in the eye distance: d^3, d^2, d terms.
        long[] distPowers = {d3, d2, d};
        for (int k = 0; k < 3; k++) {
            acc += distPowers[k] * (((pnLeftEyeYFunCoef[k + nOffset] * y) >> 3)
                    + (pnLeftEyeYFunCoef[k + 3 + nOffset] >> 3));
        }
        acc += (pnLeftEyeYFunCoef[8 + nOffset] >> 3);
        return (int) (acc >> 17);
    }

    /**
     * Derives the right eye's real-world X coordinate from the left eye's
     * real X by scaling the pixel offset between the eyes with the ratio of
     * the average real eye distance to the measured pixel eye distance.
     *
     * @param nLeftEyeX        left-eye X in pixel coordinates
     * @param nRightEyeX       right-eye X in pixel coordinates
     * @param nEyeDist         pixel distance between the eyes (must be non-zero)
     * @param nAvgEyeDist      average adult eye distance in the real world, mm
     * @param nLeftEyeXRealPos left-eye X in real-world coordinates, mm
     * @return right-eye X in real-world coordinates
     */
    private int GetRightEyeXRealPos(int nLeftEyeX, int nRightEyeX, int nEyeDist, int nAvgEyeDist, int nLeftEyeXRealPos) {
        int pixelOffset = nRightEyeX - nLeftEyeX;
        return nLeftEyeXRealPos - (nAvgEyeDist * pixelOffset) / nEyeDist;
    }

    /**
     * Derives the right eye's real-world Y coordinate from the left eye's
     * real Y by scaling the pixel offset between the eyes with the ratio of
     * the average real eye distance to the measured pixel eye distance.
     *
     * @param nLeftEyeY        left-eye Y in pixel coordinates
     * @param nRightEyeY       right-eye Y in pixel coordinates
     * @param nEyeDist         pixel distance between the eyes (must be non-zero)
     * @param nAvgEyeDist      average adult eye distance in the real world, mm
     * @param nLeftEyeYRealPos left-eye Y in real-world coordinates, mm
     * @return right-eye Y in real-world coordinates
     */
    private int GetRightEyeYRealPos(int nLeftEyeY, int nRightEyeY, int nEyeDist, int nAvgEyeDist, int nLeftEyeYRealPos) {
        int pixelOffset = nRightEyeY - nLeftEyeY;
        return nLeftEyeYRealPos - (nAvgEyeDist * pixelOffset) / nEyeDist;
    }

    /**
     * Estimates the eyes' real-world Z coordinate (viewing distance) with a
     * cubic polynomial in the pixel eye distance. Coefficients are pre-shifted
     * (&gt;&gt; 3) against overflow; the result is scaled back with &gt;&gt; 17.
     *
     * @param nEyeDist      pixel distance between the eyes
     * @param pnEyeZFunCoef fitted-function coefficients
     * @param nOffset       offset of the Z coefficients within the array
     * @return real-world Z (person-to-device distance), in mm
     */
    private int GetEyeZRealPos(int nEyeDist, long[] pnEyeZFunCoef, int nOffset) {
        long d = nEyeDist;
        long d2 = d * d;
        long d3 = d2 * d;
        long acc = (pnEyeZFunCoef[3 + nOffset] >> 3)
                + d3 * (pnEyeZFunCoef[nOffset] >> 3)
                + d2 * (pnEyeZFunCoef[1 + nOffset] >> 3)
                + d * (pnEyeZFunCoef[2 + nOffset] >> 3);
        return (int) (acc >> 17);
    }

    /**
     * Estimates the eyes' real-world Z coordinate with a cubic polynomial in
     * the pixel eye distance, without coefficient pre-shifting; used for the
     * large-eye-distance (&gt; 80) fallback path.
     *
     * @param nEyeDist      pixel distance between the eyes
     * @param pnEyeZFunCoef fitted-function coefficients
     * @param nOffset       offset of the Z coefficients within the array
     * @return real-world Z, in units of cm * 2^17
     */
    private int GetEyeZRealPosOri(int nEyeDist, long[] pnEyeZFunCoef, int nOffset) {
        long d = nEyeDist;
        long d2 = d * d;
        long d3 = d2 * d;
        long acc = pnEyeZFunCoef[3 + nOffset]
                + d3 * pnEyeZFunCoef[nOffset]
                + d2 * pnEyeZFunCoef[1 + nOffset]
                + d * pnEyeZFunCoef[2 + nOffset];
        return (int) acc;
    }

    /**
     * Serializes an int to 4 bytes, little-endian (least-significant byte first).
     *
     * @param nSend value to serialize
     * @return 4-byte little-endian representation
     */
    private byte[] Int2Bytes(int nSend) {
        byte[] result = new byte[4];
        result[0] = (byte) (nSend & 0xff);
        result[1] = (byte) ((nSend >> 8) & 0xff);
        result[2] = (byte) ((nSend >> 16) & 0xff);
        result[3] = (byte) ((nSend >> 24) & 0xff);
        return result;
    }

    /***
     * Initializes the native face engine.
     * <p>
     * Loads (creating if necessary) the persisted feature file "hwFeature.dat"
     * into {@link #srcAllocBuffer}, falls back to an in-memory 16-byte default
     * header when the file cannot be read, then hands the buffer to the native
     * core and applies the default detection/recognition rules.
     * <p>
     * Fixes over the original: the input stream is always closed (it was
     * leaked on exception), the file is fully read (a single read() could
     * return fewer bytes), and after an empty file is re-created the content
     * is re-read through a fresh stream rather than the stale one.
     *
     * @param strSerialNumber engine serial number / device UUID
     * @return result of FaceCoreHelper.InitFaceEngine (see return-code constants)
     */
    public int InitFaceEngine(String strSerialNumber) {
        int nResult = CreateInitDataFile(1);

        try {
            File dstFile = new File(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
            byte[] content = ReadFeatureBytes(dstFile);
            if (content != null && content.length == 0) {
                // Empty file: force re-creation of the default header, re-read.
                nResult = CreateInitDataFile(0);
                content = ReadFeatureBytes(dstFile);
            }
            if (content != null && content.length > 0) {
                srcAllocBuffer = content;
                iSrcAllocSize = content.length;
            }
        } catch (Exception ex) {
            nResult = HW_FAIL;
            ex.printStackTrace();
        }

        // Fallback: in-memory default header, mirroring CreateInitDataFile.
        if (srcAllocBuffer == null) {
            srcAllocBuffer = BuildDefaultHeader();
            iSrcAllocSize = srcAllocBuffer.length;
        }

        strUUID = strSerialNumber;
        nResult = FaceCoreHelper.InitFaceEngine(srcAllocBuffer, iSrcAllocSize, strUUID);
        FaceCoreHelper.SetFaceDetectionRule(1, 3);
        FaceCoreHelper.SetfaceRecognitionRule(2);
        return nResult;
    }

    /**
     * Reads the whole file into a byte array, looping until EOF.
     *
     * @return file contents, or null when the file does not exist
     */
    private byte[] ReadFeatureBytes(File file) throws IOException {
        if (!file.exists()) {
            return null;
        }
        FileInputStream in = new FileInputStream(file);
        try {
            int size = (int) file.length();
            byte[] buf = new byte[size];
            int off = 0;
            while (off < size) {
                int n = in.read(buf, off, size - off);
                if (n < 0) {
                    break;
                }
                off += n;
            }
            return buf;
        } finally {
            in.close();
        }
    }

    /**
     * Builds the default 16-byte feature header: user count 0 followed by the
     * default distance, strictness and recognition rule ordinals.
     */
    private byte[] BuildDefaultHeader() {
        byte[] data = new byte[16];
        System.arraycopy(Int2Bytes(0), 0, data, 0, 4);
        System.arraycopy(Int2Bytes(eDetectionDistanceRule.Detect_Near.ordinal()), 0, data, 4, 4);
        System.arraycopy(Int2Bytes(eDetectionStrictRule.Detect_Common.ordinal()), 0, data, 8, 4);
        System.arraycopy(Int2Bytes(eRecogRule.Recog_Loose.ordinal()), 0, data, 12, 4);
        return data;
    }

    /***
     * Releases the native engine resources and drops the cached feature buffer.
     *
     * @return result of FaceCoreHelper.ReleaseFaceEngine (see return-code constants)
     */
    public int ReleaseFaceEngine() {
        int nResult = FaceCoreHelper.ReleaseFaceEngine();
        srcAllocBuffer = null;
        iSrcAllocSize = 0;
        return nResult;
    }

    /***
     * Detects faces in an image and fills in the face-position list. Each
     * detected face carries an ID, its region (centre plus width/height) and a
     * confidence value.
     *
     * @param pbImg           input image buffer
     * @param nWidth          image width
     * @param nHeight         image height
     * @param pFacePos        output: caller-allocated array of nMaxFaceNum FacePos units
     * @param nMaxFaceNum     maximum number of faces to detect (1 = single-face mode)
     * @param pnDetectFaceNum output: number of faces actually detected
     * @return see return-code constants
     */
    public int FaceDetection(byte[] pbImg, int nWidth, int nHeight, int[] pFacePos, int nMaxFaceNum, int[] pnDetectFaceNum) {
        if (pbImg == null) {
            return HW_INVALID_IMG;
        }
        if (pFacePos == null || pnDetectFaceNum == null) {
            return HW_INVALID_ARG;
        }
        return FaceCoreHelper.FaceDetection(pbImg, nWidth, nHeight, pFacePos, nMaxFaceNum, pnDetectFaceNum);
    }


    /**
     * Unpacks one 12-int face record from the packed pFacePos array into a
     * {@link FacePos} object.
     * <p>
     * Returns an empty (default) FacePos when the input is null, larger than
     * the maximum packed size (12 * 16 ints, preserved from the original), or
     * when the record at facePosIndex would run past the end of the array —
     * the original performed no bounds check and could throw
     * ArrayIndexOutOfBoundsException.
     *
     * @param pFacePos     packed face data, 12 ints per face
     * @param facePosIndex start index of the face record to unpack
     * @return populated FacePos, or a default instance on invalid input
     */
    public FacePos GetFacePos(int[] pFacePos, int facePosIndex) {
        FacePos facepos = new FacePos();

        if (pFacePos == null) {
            return facepos;
        }
        // Original size cap: a packed array holds at most 16 faces.
        if (pFacePos.length > 12 * 16) {
            return facepos;
        }
        // New guard: reject indices whose 12-int record would overflow.
        if (facePosIndex < 0 || facePosIndex + 12 > pFacePos.length) {
            return facepos;
        }

        facepos.nID = pFacePos[facePosIndex];
        facepos.nCol = pFacePos[facePosIndex + 1];
        facepos.nRow = pFacePos[facePosIndex + 2];
        facepos.nWidth = pFacePos[facePosIndex + 3];
        facepos.nHeight = pFacePos[facePosIndex + 4];
        facepos.leftx = pFacePos[facePosIndex + 5];
        facepos.lefty = pFacePos[facePosIndex + 6];
        facepos.rightx = pFacePos[facePosIndex + 7];
        facepos.righty = pFacePos[facePosIndex + 8];
        facepos.nConfidence = pFacePos[facePosIndex + 9];
        facepos.nSelected = pFacePos[facePosIndex + 10];
        facepos.nJpgLength = pFacePos[facePosIndex + 11];

        return facepos;
    }

    /***
     * Sets the face-detection strictness in the native engine and, on success,
     * persists the new rule ordinals into bytes 4..11 of the cached feature
     * buffer and rewrites "hwFeature.dat".
     *
     * @param nDisRule distance rule (near/far)
     * @param nStrRule strictness rule
     * @return see return-code constants
     */
    public int SetFaceDetectionRule(eDetectionDistanceRule nDisRule, eDetectionStrictRule nStrRule) {
        int nResult = FaceCoreHelper.SetFaceDetectionRule(nDisRule.ordinal(), nStrRule.ordinal());

        if (nResult == HW_OK && srcAllocBuffer != null && iSrcAllocSize > 12) {
            // Persist the rule ordinals into the header (bytes 4..7 and 8..11).
            System.arraycopy(Int2Bytes(nDisRule.ordinal()), 0, srcAllocBuffer, 4, 4);
            System.arraycopy(Int2Bytes(nStrRule.ordinal()), 0, srcAllocBuffer, 8, 4);

            FileOutputStream out = null;
            try {
                out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                out.write(srcAllocBuffer);
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // Close in finally so the stream is not leaked when write()
                // throws (the original left it open in that case).
                if (out != null) {
                    try {
                        out.close();
                    } catch (IOException ignored) {
                    }
                }
            }
        }

        return nResult;
    }

    /***
     * Reads back the current face-detection rules from the native engine.
     *
     * @param nDisRule output array receiving the distance rule
     * @param nStrRule output array receiving the strictness rule
     * @return see return-code constants
     */
    public int GetCurrentFaceDetectionRule(int[] nDisRule, int[] nStrRule) {
        boolean badArgs = (nDisRule == null) || (nStrRule == null);
        if (badArgs) {
            return HW_INVALID_ARG;
        }
        return FaceCoreHelper.GetCurrentFaceDetectionRule(nDisRule, nStrRule);
    }

    /***
     * Sets the face-recognition strictness in the native engine and, on
     * success, persists the rule ordinal into bytes 12..15 of the cached
     * feature buffer and rewrites "hwFeature.dat".
     *
     * @param nRule strictness: Recog_Loose, Recog_Common or Recog_Strict
     * @return see return-code constants
     */
    public int SetfaceRecognitionRule(eRecogRule nRule) {
        int nResult = FaceCoreHelper.SetfaceRecognitionRule(nRule.ordinal());

        if (nResult == HW_OK && srcAllocBuffer != null && iSrcAllocSize > 12) {
            // Persist the rule ordinal into the header (bytes 12..15).
            System.arraycopy(Int2Bytes(nRule.ordinal()), 0, srcAllocBuffer, 12, 4);

            FileOutputStream out = null;
            try {
                out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                out.write(srcAllocBuffer);
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                // Close in finally so the stream is not leaked when write()
                // throws (the original left it open in that case).
                if (out != null) {
                    try {
                        out.close();
                    } catch (IOException ignored) {
                    }
                }
            }
        }

        return nResult;
    }

    /***
     * Reads back the current face-recognition strictness from the native engine.
     *
     * @param nRule output array receiving the rule: Recog_Loose, Recog_Common
     *              or Recog_Strict
     * @return see return-code constants
     */
    public int GetfaceRecognitionRule(int[] nRule) {
        boolean badArgs = (nRule == null);
        if (badArgs) {
            return HW_INVALID_ARG;
        }
        return FaceCoreHelper.GetfaceRecognitionRule(nRule);
    }


    /***
     * Checks whether the current image brightness is usable for recognition
     * and returns an adjustment suggestion. (Method name kept as-is —
     * "EnviromentVerfication" — for caller compatibility.)
     *
     * @param pbImg        input image
     * @param nWidth       image width
     * @param nHeight      image height
     * @param pFacePos     face positions from a prior FaceDetection() call
     * @param nFaceNum     number of faces in pFacePos (0 if none detected yet)
     * @param pnSuggestion output: Suggest_OK, Suggest_Dark or Suggest_Bright
     * @return see return-code constants
     */
    public int EnviromentVerfication(byte[] pbImg, int nWidth, int nHeight, int[] pFacePos, int nFaceNum, int[] pnSuggestion) {
        if (pbImg == null) {
            return HW_INVALID_IMG;
        }
        // NOTE(review): the original doc claimed a null pFacePos triggers
        // internal detection, but the code rejects it — behavior preserved.
        if (pFacePos == null || pnSuggestion == null) {
            return HW_INVALID_ARG;
        }
        return FaceCoreHelper.EnviromentVerfication(pbImg, nWidth, nHeight, pFacePos, nFaceNum, pnSuggestion);
    }

    /***
     * Adds a new user from an image sequence; on success the new user's ID is
     * returned through pnID and the updated feature data is persisted.
     *
     * @param info        image sequence plus per-image face positions and user
     *                    name; if positions are zero the core locates faces
     *                    itself and picks the most reliable face per image
     * @param nTempletNum number of templates to generate; supply more images
     *                    than templates in case some fail to locate/extract
     * @param bDetected   false = detect faces before enrolling;
     *                    true = enroll directly without detection
     * @param pnID        output: new user ID, or -1 on failure (including too
     *                    few successfully extracted templates)
     * @return see return-code constants
     */
    public int AddNewUser(OUserInfo info, int nTempletNum,
                          Boolean bDetected, int[] pnID) {
        if (info == null) {
            return HW_SDK_USERINFO_NOTEXIST;
        }
        if (pnID == null) {
            return HW_SDK_USERINFO_ERRID;
        }
        if (info.pbImageArray == null || info.pFacePos == null) {
            return HW_INVALID_ARG;
        }

        byte[] pSrcData = new byte[iSrcAllocSize + 15000];
        int[] pnSize = new int[1];
        // Single call site; the detected flag was the only difference between
        // the two duplicated calls in the original.
        int result = FaceCoreHelper.AddNewUser(info.pbImageArray, bDetected ? 1 : 0,
                info.nImgNum, info.nWidth, info.nHeight, info.pFacePos,
                info.strName, nTempletNum, pnID, pSrcData, pnSize);

        // Persist whenever a valid ID was assigned (the core may report a
        // non-OK code yet still enroll — original intent preserved).
        if (pnID[0] != -1) {
            // pSrcData is always non-null here; the original's null check was dead.
            if (pSrcData.length > 12) {
                FileOutputStream out = null;
                try {
                    out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                    out.write(pSrcData, 0, pnSize[0]);
                    out.flush();
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Close in finally to avoid leaking the stream on failure.
                    if (out != null) {
                        try {
                            out.close();
                        } catch (IOException ignored) {
                        }
                    }
                }
                int[] nIndex = new int[1];
                GetUserIDCount(nIndex);
                // Save the capture thumbnails for the new user.
                SaveCaptureImg(info, pnID[0], nIndex[0] + 1);
            }

            // Re-initialize the engine so it picks up the new feature file.
            ReleaseFaceEngine();
            InitFaceEngine(strUUID);
        }

        return result;
    }

    /***
     * Save capture images for a user: crop each selected face out of the NV21
     * image sequence and write it as a numbered JPEG under
     * <data>/ImgFiles/<nID>/ (0.jpg, 1.jpg, ...).
     *
     * @param info   [input] image sequence plus per-image face positions and user name;
     *               only entries whose FacePos.nSelected > 0 are saved.
     * @param nID    [input] user ID; also names the per-user image directory.
     * @param nIndex [input] user index; currently unused, kept for interface compatibility.
     * @return 0 on completion, -1 when nID is invalid (-1).
     */
    public int SaveCaptureImg(OUserInfo info, int nID, int nIndex) {
        if (nID == -1)
            return -1;

        // Build <data>/ImgFiles/<nID>/ (create both directory levels if missing).
        int count = 0;
        String str = String.format("/%d", nID);
        String filePath = strDataFile2 + "/" + strDataFile;
        String strPath = null;
        try {
            File root = new File(filePath + "/ImgFiles");
            if (!root.exists()) {
                root.mkdir();
            }
            strPath = filePath + "/ImgFiles" + str;
            root = new File(strPath);
            if (!root.exists()) {
                root.mkdir();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (strPath != null) {
            for (int i = 0; i < 10; i++) {
                FacePos facepos = GetFacePos(info.pFacePos, i * 12);
                if (facepos.nSelected <= 0) {
                    continue;
                }
                // Face-centered crop; width rounded down to a multiple of 4 and
                // height to a multiple of 2 as the YUV crop routine requires.
                int startX = facepos.nCol - (facepos.nWidth / 2);
                int startY = facepos.nRow - (facepos.nHeight / 2);
                int stopX = facepos.nCol + (facepos.nWidth / 2);
                int stopY = facepos.nRow + (facepos.nHeight / 2);

                int bmpWidth = ((stopX - startX) >> 2) << 2;
                int bmpHeight = ((stopY - startY) >> 1) << 1;

                byte[] img = new byte[bmpWidth * bmpHeight * 2];
                CropYUVNV21(info.pbImageArray, i * info.nWidth * info.nHeight * 2,
                        info.nWidth, info.nHeight, bmpWidth, bmpHeight,
                        (startY >> 1) << 1, (startX >> 1) << 1, img);
                YuvImage yuvimage = new YuvImage(img, ImageFormat.NV21,
                        bmpWidth, bmpHeight, null);

                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                yuvimage.compressToJpeg(new Rect(0, 0, bmpWidth, bmpHeight), 100, baos);
                byte[] jpeg = baos.toByteArray();

                Bitmap bmp = BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
                bmp = UtilFunc.toImgScale(bmp, 160, 160);
                try {
                    baos.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }

                // Write the scaled 160x160 face as "<count>.jpg". The stream is
                // closed in finally (the original leaked it when compress threw)
                // and the original's second baos.close() was redundant.
                FileOutputStream outStream = null;
                try {
                    String strCapImg = strPath + "/" + String.format("%d.jpg", count);
                    outStream = new FileOutputStream(new File(strCapImg));
                    bmp.compress(Bitmap.CompressFormat.JPEG, 100, outStream);
                    outStream.flush();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (outStream != null) {
                        try {
                            outStream.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }

                count++;
            }
        }
        return 0;
    }


    /***
     * Append an extra ("plus") template for an existing user and save the
     * corresponding face crop as ImgFiles/<nID>/plus.jpg.
     *
     * @param info [input] image sequence with per-image face positions; the first
     *             entry with nSelected > 0 defines the crop region.
     * @param nID [input] ID of the existing user to extend.
     * @param pbJpgImg [input] JPG-encoded source frame.
     * @param nLength [input] length of the JPG data in bytes.
     * @param nSrcWidth [input] source image width.
     * @param nSrcHeight [input] source image height.
     * @param nDstWidth [input] width of the image the face positions refer to.
     * @param nDstHeight [input] height of the image the face positions refer to.
     * @return see the return-code table (-1 on failure).
     */
    public int ReplaceCaptureImg(OUserInfo info, int nID, byte[] pbJpgImg, int nLength, int nSrcWidth, int nSrcHeight, int nDstWidth, int nDstHeight) {
        int returnNum = -1;
        if (nID == -1) return returnNum;
        String str = String.format("/%d", nID);
        String filePath = strDataFile2 + "/" + strDataFile;
        String strPath = null;
        try {
            File root = new File(filePath + "/ImgFiles");
            if (!root.exists()) {
                root.mkdir();
            }
            // Create the per-user image directory ImgFiles/<nID>.
            strPath = filePath + "/ImgFiles" + str;
            root = new File(strPath);
            if (!root.exists()) {
                root.mkdir();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Scale factors mapping face-position coordinates (in the nDst* space)
        // to source-image coordinates.
        float xRatio = (float) nSrcWidth / (float) nDstWidth;
        float yRatio = (float) nSrcHeight / (float) nDstHeight;
        int cropX = 0;
        int cropY = 0;
        int cropWidth = 0;
        int cropHeight = 0;

        // Use the first selected face (of at most 10) to compute a 3x-face-sized
        // crop region, clamped to the source image bounds.
        for (int i = 0; i < 10; i++) {
            FacePos facepos = GetFacePos(info.pFacePos, i * 12);
            if (facepos.nSelected > 0) {
                if (facepos.nWidth > 250)
                    return returnNum;
                cropX = (int) ((facepos.nCol - (3 * facepos.nWidth / 2)) * xRatio);
                cropX = cropX < 0 ? 0 : cropX;
                cropY = (int) ((facepos.nRow - (3 * facepos.nHeight / 2)) * yRatio);
                cropY = cropY < 0 ? 0 : cropY;
                cropWidth = (int) ((3 * facepos.nWidth) * xRatio);
                cropWidth = cropX + cropWidth > nSrcWidth ? nSrcWidth - cropX : cropWidth;
                cropHeight = (int) ((3 * facepos.nHeight) * yRatio);
                cropHeight = cropY + cropHeight > nSrcHeight ? nSrcHeight - cropY : cropHeight;
                break;
            }
        }

        if (strPath != null) {
            // Decode the source JPG and cut out the crop region around the face.
            Bitmap testBitMap = BitmapFactory.decodeByteArray(pbJpgImg, 0, nLength);

            Bitmap newCropBitMap = Bitmap.createBitmap(testBitMap, cropX, cropY, cropWidth, cropHeight);

            int width = newCropBitMap.getWidth();
            int height = newCropBitMap.getHeight();
            int[] pixels = new int[width * height];
            byte[] pDstData = new byte[iSrcAllocSize + 15000];
            int[] pnDataSize = new int[1];
            newCropBitMap.getPixels(pixels, 0, width, 0, 0, width, height);
            int[] pFacePos = new int[12];
            returnNum = FaceCoreHelper.AddPlusTemplate(pixels, width, height, nID, pFacePos, pDstData, pnDataSize);

            if ((returnNum == 0) && (pnDataSize[0] > 0)) {
                FileOutputStream out = null;

                // Persist the updated feature database.
                try {
                    out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                    out.write(pDstData, 0, pnDataSize[0]);
                    out.flush();
                    out.close();

                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            int startX = 0;
            int startY = 0;
            int stopX = 0;
            int stopY = 0;

            if (pnDataSize[0] > 0 && returnNum == 0) {
                // Face rectangle reported by AddPlusTemplate, relative to the crop.
                FacePos facepos = GetFacePos(pFacePos, 0);
                if (facepos.nConfidence > 0) {
                    startX = facepos.nCol - ((facepos.nWidth / 2));
                    startY = facepos.nRow - ((facepos.nHeight / 2));
                    stopX = facepos.nCol + ((facepos.nWidth / 2));
                    stopY = facepos.nRow + ((facepos.nHeight / 2));
                }

                // NOTE(review): when nConfidence <= 0 the rectangle is all-zero and
                // Bitmap.createBitmap below throws IllegalArgumentException (not
                // caught by the IOException handler) — confirm callers guard this.
                try {
                    Bitmap newBitMap = Bitmap.createBitmap(newCropBitMap, startX, startY, stopX - startX, stopY - startY);
                    strPath += "/plus.jpg";
                    File myCaptureFile = new File(strPath);
                    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(myCaptureFile));
                    newBitMap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
                    bos.flush();
                    bos.close();
                    if (newBitMap != null && !newBitMap.isRecycled()) {
                        newBitMap.recycle();
                        newBitMap = null;
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }

            // Re-initialize the engine so it reloads the persisted database.
            ReleaseFaceEngine();
            InitFaceEngine(strUUID);

            if (newCropBitMap != null && !newCropBitMap.isRecycled()) {
                newCropBitMap.recycle();
                newCropBitMap = null;
            }

            if (testBitMap != null && !testBitMap.isRecycled()) {
                testBitMap.recycle();
                testBitMap = null;
            }
        }

        return returnNum;
    }

    /***
     * Remove the user with the given ID from the face database, persist the
     * updated feature file and delete the user's capture-image directory.
     *
     * @param nID [input] user ID to remove.
     * @return see the return-code table (HW_OK on success).
     */
    public int RemoveUserbyID(int nID) {
        int[] pnSize = new int[1];
        pnSize[0] = iSrcAllocSize;
        int result = FaceCoreHelper.RemoveUserbyID(nID, srcAllocBuffer, pnSize);
        // Persist the updated feature data only on success.
        if (result == HW_OK) {
            if ((srcAllocBuffer != null) && (pnSize[0] > 12)) {
                FileOutputStream out = null;
                try {
                    out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                    out.write(srcAllocBuffer, 0, pnSize[0]);
                    out.flush();
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Close in finally — the original leaked the stream on exception.
                    if (out != null) {
                        try {
                            out.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }

                try {
                    // BUGFIX: the original passed the data path as a String.format
                    // format string; a '%' in the path would have thrown. Build the
                    // path by plain concatenation instead.
                    File file = new File(strDataFile2 + "/" + strDataFile + "/ImgFiles/" + nID);
                    DeleteFiles(file);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }

            // Re-initialize the engine so it reloads the persisted database.
            ReleaseFaceEngine();
            InitFaceEngine(strUUID);
        }
        return result;
    }


    /***
     * Get the number of user IDs stored in the face database.
     *
     * @param pnNum [output] receives the user-ID count.
     * @return see the return-code table (HW_INVALID_ARG for a null output array).
     */
    public int GetUserIDCount(int[] pnNum) {
        return (pnNum == null) ? HW_INVALID_ARG : FaceCoreHelper.GetUserIDCount(pnNum);
    }

    /***
     * Get the user ID of the N-th user in the face database.
     *
     * @param nIndex [input] user index, counted from 1.
     * @param pnID [output] receives the user ID.
     * @return see the return-code table (HW_SDK_USERINFO_ERRID for a null output array).
     */
    public int GetUserIDbyIndex(int nIndex, int[] pnID) {
        return (pnID == null) ? HW_SDK_USERINFO_ERRID : FaceCoreHelper.GetUserIDbyIndex(nIndex, pnID);
    }

    /***
     * Update the stored information (face image sequence and/or name) of the user
     * with the given ID, persist the feature file and refresh the capture images.
     *
     * @param nID [input] ID of the user to update.
     * @param bDetected [input] True: images must pass face detection before enrolment;
     *                          False: enrol directly without detection.
     * @param info [input] to update only the template images, set strName[0] = 0 and
     *             fill pbImgArray / nImgNum / nWidth / nHeight / pFacePos as for
     *             AddNewUser (a zeroed pFacePos entry makes the core locate the face
     *             itself); to update only the name, pass nImgNum = 0 and a valid strName.
     * @return see the return-code table.
     */
    public int UpdateUserInfobyID(int nID, Boolean bDetected, OUserInfo info) {
        if (info == null) {
            return HW_SDK_USERINFO_NOTEXIST;
        }
        if (info.pbImageArray == null || info.pFacePos == null) {
            return HW_INVALID_ARG;
        }
        // Guard the boxed flag: auto-unboxing a null Boolean would throw an NPE.
        if (bDetected == null) {
            return HW_INVALID_ARG;
        }
        int[] pnSize = new int[1];
        pnSize[0] = iSrcAllocSize;
        // Fold the detection flag into the native call (0 = no detect, 1 = detect)
        // instead of duplicating the whole invocation in two branches.
        int result = FaceCoreHelper.UpdateUserInfobyID(nID, info.pbImageArray, bDetected ? 1 : 0, info.nImgNum, info.nWidth, info.nHeight, info.pFacePos, info.strName, srcAllocBuffer, pnSize);

        // Persist the updated feature data on success.
        if (result == HW_OK) {
            if ((srcAllocBuffer != null) && (pnSize[0] > 12)) {
                FileOutputStream out = null;
                try {
                    out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                    out.write(srcAllocBuffer, 0, pnSize[0]);
                    out.flush();
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Close in finally — the original leaked the stream on exception.
                    if (out != null) {
                        try {
                            out.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }

                try {
                    int[] nIndex = new int[1];
                    FaceCoreHelper.GetIndexbyUserID(nID, nIndex);
                    // Refresh the saved capture images for this user.
                    SaveCaptureImg(info, nID, nIndex[0]);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }

        return result;
    }

    /***
     * Update only the name of the user with the given ID and persist the feature file.
     *
     * @param nID [input] ID of the user to rename.
     * @param strName [input] new user name.
     * @return see the return-code table.
     */
    public int UpdateUserNamebyID(int nID, String strName) {
        int[] pnSize = new int[1];
        pnSize[0] = iSrcAllocSize;
        int result = FaceCoreHelper.UpdateUserNamebyID(nID, strName, srcAllocBuffer, pnSize);
        // Persist the updated feature data on success.
        if (result == HW_OK) {
            if ((srcAllocBuffer != null) && (pnSize[0] > 12)) {
                FileOutputStream out = null;
                try {
                    out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                    out.write(srcAllocBuffer, 0, pnSize[0]);
                    out.flush();
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Close in finally — the original leaked the stream on exception.
                    if (out != null) {
                        try {
                            out.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        }
        return result;
    }


    /***
     * Fetch the stored information (cropped face images, name, capture shots) of
     * the user with the given ID.
     *
     * @param nID [input] user ID.
     * @param info [input, output] the user's images are copied into pbImgArray
     *        (the core crops each picture down to the face and normalizes its size);
     *        pFacePos receives the face position inside each cropped image;
     *        nWidth / nHeight receive the normalized image size; strName the user name.
     * @return see the return-code table.
     */
    public int GetUserInfobyID(int nID, OUserInfo info) {
        if (info == null) {
            return HW_SDK_USERINFO_NOTEXIST;
        }
        if (info.pbImageArray == null || info.pFacePos == null) {
            return HW_INVALID_ARG;
        }

        // Wrap the in/out scalars as single-element arrays for the native call.
        int[] imgCount = {info.nImgNum};
        int[] width = {info.nWidth};
        int[] height = {info.nHeight};
        byte[] nameBuf = new byte[32];
        int[] userIndex = new int[1];

        int rc = FaceCoreHelper.GetUserInfobyID(nID, info.pbImageArray, imgCount, width, height, info.pFacePos, nameBuf, userIndex);

        info.nWidth = width[0];
        info.nHeight = height[0];
        info.strName = FileUtils.getStringFromBytes(nameBuf);
        info.strId = nID;
        GetCaptureImage(info, nID);

        return rc;
    }

    /***
     * Fetch the stored information (cropped face images, name, capture shots) of
     * the user at the given index.
     *
     * @param nIndex [input] user index.
     * @param info [input, output] the user's images are copied into pbImgArray
     *        (the core crops each picture down to the face and normalizes its size);
     *        pFacePos receives the face position inside each cropped image;
     *        nWidth / nHeight receive the normalized image size; strName the user name.
     * @return see the return-code table.
     */
    public int GetUserInfobyIndex(int nIndex, OUserInfo info) {
        if (info == null) {
            return HW_SDK_USERINFO_NOTEXIST;
        }
        if (info.pbImageArray == null || info.pFacePos == null) {
            return HW_INVALID_ARG;
        }

        // Wrap the in/out scalars as single-element arrays for the native call.
        int[] imgCount = {info.nImgNum};
        int[] width = {info.nWidth};
        int[] height = {info.nHeight};
        byte[] nameBuf = new byte[32];
        int[] userId = new int[1];

        int rc = FaceCoreHelper.GetUserInfobyIndex(nIndex, info.pbImageArray, imgCount, width, height, info.pFacePos, nameBuf, userId);

        info.nWidth = width[0];
        info.nHeight = height[0];
        info.strName = FileUtils.getStringFromBytes(nameBuf);
        GetCaptureImage(info, userId[0]);

        return rc;
    }


    /***
     * Recognize the face in the given image against the enrolled database.
     *
     * @param pbImg [input] input image.
     * @param nWidth [input] image width.
     * @param nHeight [input] image height.
     * @param pFacePos [input] face position from FaceDetection(); call once per face.
     *                 If detection has not been run, NULL lets the core pick the
     *                 most reliable face itself.
     * @param pnID [output] matched user ID, or -1 when not enrolled / rejected /
     *             no face detected.
     * @param pnScore [output] similarity score (converted via ChangeDistToScore).
     * @param pnFtrPos [output] index of the best-matching template.
     * @return see the return-code table.
     */
    public int FaceRecognition(byte[] pbImg, int nWidth, int nHeight, int[] pFacePos, int[] pnID, int[] pnScore, int[] pnFtrPos) {
        if (pbImg == null) {
            return HW_INVALID_IMG;
        }
        if (pFacePos == null) {
            return HW_INVALID_ARG;
        }
        // All three output arrays share the same error code; check them together.
        if (pnID == null || pnScore == null || pnFtrPos == null) {
            return HW_SDK_USERINFO_ERRID;
        }
        int[] pnDataSize = new int[1];
        pnDataSize[0] = srcAllocBuffer.length;
        int result = FaceCoreHelper.FaceRecognition(pbImg, nWidth, nHeight, pFacePos, pnID, pnScore, pnFtrPos, srcAllocBuffer, pnDataSize);
        // NOTE(review): Arrays.toString over the whole srcAllocBuffer is a very
        // expensive log line; kept for behavioral compatibility — consider trimming.
        MyLog.i("HWFaceLib", "需要识别的图片尺寸:" + pbImg.length +
                "\n识别结果:" + result +
                "\n图片宽度:" + nWidth +
                "\n图片高度:" + nHeight +
                "\n脸部位置:" + Arrays.toString(pFacePos) +
                "\n得分" + pnScore[0] +
                "\n用户id:" + pnID[0] +
                "\n模板位置：" + pnFtrPos[0] +
                "\n缓冲区尺寸:" + pnDataSize[0]
                + "\n缓冲区内容:" + Arrays.toString(srcAllocBuffer)
        );
        if (result == -8) {
            return HW_NOT_VALID_SERIALNUMBER;
        }
        if (pnScore[0] != -1)
            pnScore[0] = ChangeDistToScore(pnScore[0]);

        // result == 1: the core reports updated feature data — persist it.
        if (result == 1) {
            if ((srcAllocBuffer != null) && (pnDataSize[0] > 12)) {
                FileOutputStream out = null;
                try {
                    out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                    out.write(srcAllocBuffer, 0, pnDataSize[0]);
                    out.flush();
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Close in finally — the original leaked the stream on exception.
                    if (out != null) {
                        try {
                            out.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        }

        return result;
    }


    /***
     * Check whether the current face image is usable for recognition and report why
     * not. This is a lightweight check (image size, frontal pose) — not a full
     * recognition pass.
     *
     * @param pbImg [input] input image.
     * @param nWidth [input] image width.
     * @param nHeight [input] image height.
     * @param pFacePos [input] face position from FaceDetection(); call once per face.
     *                 If detection has not been run, NULL lets the core pick the
     *                 most reliable face itself.
     * @param pnReason [output] verification result: FaceVerify_None, FaceVerify_OK,
     *                 FaceVerify_Small, FaceVerify_Slant, FaceVerify_Fail.
     * @return see the return-code table.
     */
    public int FaceVerifationforRecognition(byte[] pbImg, int nWidth, int nHeight, int[] pFacePos, int[] pnReason) {
        if (pbImg == null) {
            return HW_INVALID_IMG;
        }
        if (pFacePos == null || pnReason == null) {
            return HW_INVALID_ARG;
        }
        return FaceCoreHelper.FaceVerifationforRecognition(pbImg, nWidth, nHeight, pFacePos, pnReason);
    }

    /***
     * Check whether the current face image is usable for enrolling a new user and
     * report why not.
     *
     * @param pbImg [input] input image.
     * @param nWidth [input] image width.
     * @param nHeight [input] image height.
     * @param pFacePos [input] face position from FaceDetection(); call once per face.
     *                 If detection has not been run, NULL lets the core pick the
     *                 most reliable face itself.
     * @param pnReason [output] verification result: FaceVerify_None, FaceVerify_OK,
     *                 FaceVerify_Small, FaceVerify_Slant, FaceVerify_Fail.
     * @return see the return-code table.
     */
    public int FaceVerifationforAddNewUser(byte[] pbImg, int nWidth, int nHeight, int[] pFacePos, int[] pnReason) {
        if (pbImg == null) {
            return HW_INVALID_IMG;
        }
        if (pFacePos == null || pnReason == null) {
            return HW_INVALID_ARG;
        }
        return FaceCoreHelper.FaceVerifationforAddNewUser(pbImg, nWidth, nHeight, pFacePos, pnReason);
    }


    /**
     * Crop a rectangle out of a YUV NV21 frame.
     *
     * @param pInYUVImgBuf   source NV21 buffer
     * @param nIndex         byte offset of the frame inside the source buffer
     * @param nInImageWidth  source frame width
     * @param nInImageHeight source frame height
     * @param nInRectWidth   crop width
     * @param nInRectHeight  crop height
     * @param nTop           top-left Y coordinate of the crop
     * @param nLeft          top-left X coordinate of the crop
     * @param pOutBuff       receives the cropped YUV data
     * @return see the return-code table
     */
    public int CropYUVNV21(byte[] pInYUVImgBuf, int nIndex, int nInImageWidth, int nInImageHeight, int nInRectWidth, int nInRectHeight, int nTop, int nLeft, byte[] pOutBuff) {
        if (pInYUVImgBuf == null) {
            return HW_INVALID_IMG;
        }
        return (pOutBuff == null)
                ? HW_INVALID_ARG
                : FaceCoreHelper.CropYUVNV21(pInYUVImgBuf, nIndex, nInImageWidth, nInImageHeight, nInRectWidth, nInRectHeight, nTop, nLeft, pOutBuff);
    }

    /**
     * Convert NV12 data (Y plane followed by interleaved U,V) to NV21 (Y plane
     * followed by interleaved V,U) by copying the luma plane and swapping each
     * chroma byte pair.
     *
     * @param pbNV12  source NV12 buffer, at least nWidth * nHeight * 3 / 2 bytes
     * @param pbNV21  destination NV21 buffer of the same size
     * @param nWidth  image width in pixels
     * @param nHeight image height in pixels
     */
    public void NV12ToNV21(byte[] pbNV12, byte[] pbNV21, int nWidth, int nHeight) {
        final int lumaSize = nWidth * nHeight;
        final int frameSize = lumaSize + (lumaSize >> 1);

        // The luma plane is identical in both layouts.
        System.arraycopy(pbNV12, 0, pbNV21, 0, lumaSize);

        // Chroma plane: swap each U,V pair into V,U order.
        for (int pos = lumaSize; pos + 1 < frameSize; pos += 2) {
            pbNV21[pos] = pbNV12[pos + 1];
            pbNV21[pos + 1] = pbNV12[pos];
        }
    }

    /**
     * Extract a feature template from a grayscale face image via the native core.
     *
     * @param pbImg          grayscale image data
     * @param nWidth         image width
     * @param nHeight        image height
     * @param pFrEyePosition detected face/eye position
     * @param pFrFaceFeature receives the extracted template data
     * @return see the return-code table
     */
    public int GetFaceFeatureEx(byte[] pbImg, int nWidth, int nHeight, int[] pFrEyePosition, byte[] pFrFaceFeature) {
        return FaceCoreHelper.GetFaceFeatureEx(pbImg, nWidth, nHeight, pFrEyePosition, pFrFaceFeature);
    }

    /**
     * Compare a probe feature against an enrolled template via the native core.
     *
     * @param pThisFeature      probe feature to compare
     * @param pReferenceFeature enrolled reference template
     * @param Frvalue           receives the comparison score
     * @return see the return-code table
     */
    public int CompareFeature(byte[] pThisFeature, byte[] pReferenceFeature, int[] Frvalue) {
        return FaceCoreHelper.CompareFeature(pThisFeature, pReferenceFeature, Frvalue);
    }

    /**
     * Second-stage feature comparison via the native core.
     *
     * @param pThisFeature      probe feature to compare
     * @param pReferenceFeature enrolled reference template
     * @param Frvalue           receives the comparison score
     * @return see the return-code table
     */
    public int CompareSecondFeature(byte[] pThisFeature, byte[] pReferenceFeature, int[] Frvalue) {
        return FaceCoreHelper.CompareSecondFeature(pThisFeature, pReferenceFeature, Frvalue);
    }

    /**
     * Query the native core for the feature-template size in bytes.
     *
     * @param FrFaceFeatureLength receives the feature size
     * @return see the return-code table; a null output array is a silent no-op
     *         returning HW_OK (original semantics preserved)
     */
    public int GetFeatureSize(int[] FrFaceFeatureLength) {
        if (FrFaceFeatureLength == null) {
            return HW_OK;
        }
        return FaceCoreHelper.GetFeatureSize(FrFaceFeatureLength);
    }

    /**
     * Enrol a new user directly from pre-extracted feature templates (unlike
     * AddNewUser, which enrols from raw images), persist the feature file and save
     * the user's capture images.
     *
     * @param info           user info (name, plus the image sequence used for the capture shots)
     * @param pbFeatureArray feature data to enrol
     * @param nTempletNum    number of templates in pbFeatureArray
     * @param pnID           [output] assigned user ID (-1 on failure)
     * @return see the return-code table
     */
    public int AddUserWithFeatures(OUserInfo info, byte[] pbFeatureArray, int nTempletNum, int[] pnID) {
        if (info == null) {
            return HW_SDK_USERINFO_NOTEXIST;
        }
        if (pnID == null) {
            return HW_SDK_USERINFO_ERRID;
        }
        if (info.pbImageArray == null || info.pFacePos == null) {
            return HW_INVALID_ARG;
        }
        byte[] pSrcData = new byte[iSrcAllocSize + 15000];
        int[] pnSize = new int[1];

        int result = FaceCoreHelper.AddUserWithFeatures(pbFeatureArray, info.strName, nTempletNum, pnID, pSrcData, pnSize);
        MyLog.i("HWFaceLib", "添加用户-->" + pnID[0] + "数据长度:" + pnSize[0]);
        // Persist the updated feature data if a user was actually created.
        if (pnID[0] != -1) {
            // BUGFIX: the original tested pSrcData.length > 12, which is always true
            // (the buffer is freshly allocated above); test the actual data size
            // instead, consistent with RemoveUserbyID / UpdateUserInfobyID.
            if (pnSize[0] > 12) {
                FileOutputStream out = null;
                try {
                    out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                    out.write(pSrcData, 0, pnSize[0]);
                    out.flush();
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Close in finally — the original leaked the stream on exception.
                    if (out != null) {
                        try {
                            out.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
                int[] nIndex = new int[1];
                GetUserIDCount(nIndex);
                // Save the capture images for the new user.
                SaveCaptureImg(info, pnID[0], nIndex[0] + 1);
            }

            // Re-initialize the engine so it reloads the persisted database.
            ReleaseFaceEngine();
            InitFaceEngine(strUUID);
        }

        return result;
    }

    /**
     * Pose detection: detect the single face in the image and report which way it
     * is turned.
     *
     * @param pbImg    [input] grayscale image data
     * @param iWidth   [input] image width
     * @param iHeight  [input] image height
     * @param pFacePos [output] pFacePos[0] receives the detected face rectangle
     * @param eRroRule [output] eRroRule[0] receives the pose: 0 = frontal,
     *                 1 = turned left, 2 = turned right (mapped via getRrofileRule)
     * @return HW_OK when exactly one face was found and the pose was judged;
     *         otherwise the FaceDetection result / HW_FAIL
     */
    public int RrofileRecognition(byte[] pbImg, int iWidth, int iHeight, Rect[] pFacePos, eRrofileRule[] eRroRule) {
        int result = HW_FAIL;
        int[] faceNum = new int[1];
        int[] facePosition = new int[12];
        byte[] pbRam = new byte[600 * 1024]; // scratch buffer passed to JudgeImagePos
        result = FaceDetection(pbImg, iWidth, iHeight, facePosition, 1, faceNum);

        // Only judge the pose when exactly one face was detected.
        if (faceNum[0] == 1) {
            FacePos facepos = GetFacePos(facePosition, 0);

            // Face rectangle centered on (nCol, nRow).
            int startX = facepos.nCol - (facepos.nWidth / 2);
            int startY = facepos.nRow - (facepos.nHeight / 2);

            int stopX = startX + facepos.nWidth;
            int stopY = startY + facepos.nHeight;

            pFacePos[0] = new Rect(startX, startY, stopX, stopY);

            // Judge the pose from the eye positions; a negative result means failure.
            int iResult = FaceCoreHelper.JudgeImagePos(pbImg, iWidth, iHeight, facepos.leftx, facepos.lefty, facepos.rightx, facepos.righty, pbRam, 1);

            if (iResult > -1) {
                eRroRule[0] = getRrofileRule(iResult);
                result = HW_OK;
            }
        }
        return result;
    }


    // Save the face crop from an NV21 preview frame to the temporary file
    // <strDataFile2>/tmp.jpg and return its path (empty string if the path was
    // never assigned before a failure).
    public String saveCropBitmap(byte[] data, int nWidth, int nHeight, int[] tempfacePos) {
        String strCapImg = "";
        FacePos facepos = HWFaceLib.getInstance().GetFacePos(tempfacePos, 0);

        // Encode the full NV21 frame to JPEG, then decode it back to a Bitmap.
        YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21,
                nWidth, nHeight, null);

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvimage.compressToJpeg(
                new Rect(0, 0, nWidth, nHeight), 100, baos);

        Bitmap newCropBitMap = BitmapFactory.decodeByteArray(baos.toByteArray(), 0, baos.toByteArray().length);
        try {
            baos.close();
        } catch (IOException e) {
            e.printStackTrace();
        }

        // Face rectangle centered on (nCol, nRow); stays all-zero when the
        // detection confidence is not positive.
        // NOTE(review): with a zero-size rectangle, Bitmap.createBitmap below
        // throws IllegalArgumentException (not caught by the IOException handler)
        // — verify callers only pass positions with nConfidence > 0.
        int startX = 0;
        int startY = 0;
        int stopX = 0;
        int stopY = 0;
        if (facepos.nConfidence > 0) {
            startX = facepos.nCol - ((facepos.nWidth / 2));
            startY = facepos.nRow - ((facepos.nHeight / 2));
            stopX = facepos.nCol + ((facepos.nWidth / 2));
            stopY = facepos.nRow + ((facepos.nHeight / 2));
        }

        try {

            // Crop the face, replace any previous tmp.jpg and write the new one.
            Bitmap newBitMap = Bitmap.createBitmap(newCropBitMap, startX, startY, stopX - startX, stopY - startY);
            strCapImg = strDataFile2 + "/" + "tmp.jpg";
            File myCaptureFile = new File(strCapImg);
            if (myCaptureFile.exists())
                myCaptureFile.delete();
            BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(myCaptureFile));
            newBitMap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
            bos.flush();
            bos.close();
            if (newBitMap != null && !newBitMap.isRecycled()) {
                newBitMap.recycle();
                newBitMap = null;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }


        if (newCropBitMap != null && !newCropBitMap.isRecycled()) {
            newCropBitMap.recycle();
            newCropBitMap = null;
        }


        return strCapImg;

    }

    /**
     * Enrol a user from pre-extracted feature templates and install the temporary
     * capture image (tmp.jpg, written by saveCropBitmap) as the user's first
     * capture shot (ImgFiles/<nID>/0.jpg).
     *
     * @param pbFeatureArray feature data to enrol
     * @param nTempletNum    number of templates in pbFeatureArray
     * @param pnID           [output] assigned user ID (-1 on failure)
     * @param strName        user name to enrol under
     * @return see the return-code table
     */
    public int AddPlusUserWithFeatures(byte[] pbFeatureArray, int nTempletNum, int[] pnID, String strName) {
        byte[] pSrcData = new byte[iSrcAllocSize + 15000];
        int[] pnSize = new int[1];

        int result = FaceCoreHelper.AddUserWithFeatures(pbFeatureArray, strName, nTempletNum, pnID, pSrcData, pnSize);

        // Persist the updated feature data if a user was actually created.
        if (pnID[0] != -1) {
            // BUGFIX: the original tested pSrcData.length > 12, which is always true
            // (the buffer is freshly allocated above); test the actual data size
            // instead, consistent with the sibling methods.
            if (pnSize[0] > 12) {
                FileOutputStream out = null;
                try {
                    out = new FileOutputStream(strDataFile2 + "/" + strDataFile + "/hwFeature.dat");
                    out.write(pSrcData, 0, pnSize[0]);
                    out.flush();
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Close in finally — the original leaked the stream on exception.
                    if (out != null) {
                        try {
                            out.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }

                // Create ImgFiles/<nID>/ and copy the temporary capture into it.
                String str = String.format("/%d", pnID[0]);
                String filePath = strDataFile2 + "/" + strDataFile;
                String strPath = null;
                try {
                    File root = new File(filePath + "/ImgFiles");
                    if (!root.exists()) {
                        root.mkdir();
                    }
                    strPath = filePath + "/ImgFiles" + str;
                    root = new File(strPath);
                    if (!root.exists()) {
                        root.mkdir();
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
                // BUGFIX: if directory creation failed, strPath stayed null and the
                // original concatenated it into a literal "null/0.jpg" target path;
                // skip the copy instead.
                if (strPath != null) {
                    String strCapImg = strPath + "/" + String.format("%d.jpg", 0);
                    String strSrcImg = strDataFile2 + "/" + "tmp.jpg";
                    copyDatabase(strSrcImg, strCapImg);
                }
            }

            // Re-initialize the engine so it reloads the persisted database.
            ReleaseFaceEngine();
            InitFaceEngine(strUUID);
        }

        return result;
    }

    /**
     * Copies the file at {@code strSrcFilePath} to {@code strDstFilePath}.
     * Does nothing when the source file does not exist; any I/O failure is
     * logged and swallowed (best-effort, matching the class's error style).
     *
     * @param strSrcFilePath path of the file to copy
     * @param strDstFilePath destination path (created/overwritten)
     */
    private void copyDatabase(String strSrcFilePath, String strDstFilePath) {
        File srcFile = new File(strSrcFilePath);
        if (!srcFile.exists()) {
            // Previously an empty destination file was created even when the
            // source was missing; bail out early instead.
            return;
        }
        // try-with-resources guarantees both streams are closed even when an
        // exception occurs (the original leaked them on failure).
        try (FileInputStream is = new FileInputStream(srcFile);
             FileOutputStream fos = new FileOutputStream(strDstFilePath)) {
            // Loop until EOF: a single read() is not guaranteed to return the
            // whole file, and available() is unreliable as a size hint.
            byte[] buffer = new byte[8192];
            int read;
            while ((read = is.read(buffer)) != -1) {
                fos.write(buffer, 0, read);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Maps the native pose/profile flag to its enum constant.
     *
     * @param pbRam pose flag from the engine (0=front, 1=left, 2=right, 3=up, 4=down)
     * @return the matching {@link eRrofileRule}, or {@code null} for any other value
     */
    public eRrofileRule getRrofileRule(int pbRam) {
        if (pbRam == 0) {
            return eRrofileRule.Rrofile_Front;
        }
        if (pbRam == 1) {
            return eRrofileRule.Rrofile_Left;
        }
        if (pbRam == 2) {
            return eRrofileRule.Rrofile_Right;
        }
        if (pbRam == 3) {
            return eRrofileRule.Rrofile_Up;
        }
        if (pbRam == 4) {
            return eRrofileRule.Rrofile_Down;
        }
        // Unknown flag: no constant applies.
        return null;
    }


    /**
     * Maps the native expression flag to its enum constant.
     *
     * @param iRst expression flag from the engine (0=none, 1=angry, 2=happy, 3=sad, 4=surprise)
     * @return the matching {@link eExpressionRule}; any unknown flag maps to {@code Exp_None}
     */
    public eExpressionRule getExpressionRule(int iRst) {
        if (iRst == 1) {
            return eExpressionRule.Exp_Angry;
        }
        if (iRst == 2) {
            return eExpressionRule.Exp_Happy;
        }
        if (iRst == 3) {
            return eExpressionRule.Exp_Sad;
        }
        if (iRst == 4) {
            return eExpressionRule.Exp_Surpprise;
        }
        // 0 and every out-of-range flag fall back to "no expression".
        return eExpressionRule.Exp_None;
    }

    /**
     * Maps a numeric expression-intensity score to its degree bucket.
     *
     * @param piScore intensity score from the engine
     * @return {@code Exp_Slight} below 65, {@code Exp_Extreme} above 85,
     *         otherwise {@code Exp_Striking}
     */
    public eExpDegreeRule getExpDegreeRule(int piScore) {
        if (piScore > 85) {
            return eExpDegreeRule.Exp_Extreme;
        }
        // Scores in [65, 85] are "striking"; anything lower is "slight".
        return (piScore < 65) ? eExpDegreeRule.Exp_Slight : eExpDegreeRule.Exp_Striking;
    }


    /**
     * Detection strictness levels, ordered from most permissive to most strict.
     *
     * @author HuangCH
     */
    public enum eDetectionStrictRule {
        /** Very loose. */
        Detect_Looser,
        /** Loose. */
        Detect_Loose,
        /** Normal. */
        Detect_Common,
        /** Strict. */
        Detect_Strict
    }

    /**
     * Detection distance modes.
     *
     * @author HuangCH
     */
    public enum eDetectionDistanceRule {
        /** Faces far from the camera. */
        Detect_Far,
        /** Faces near the camera. */
        Detect_Near
    }

    /**
     * Recognition strictness levels, ordered from most permissive to most strict.
     *
     * @author HuangCH
     */
    public enum eRecogRule {
        /** Loose. */
        Recog_Loose,
        /** Normal. */
        Recog_Common,
        /** Strict. */
        Recog_Strict
    }

    /**
     * Expression types. Declaration order must match the engine's integer
     * flags (0..4) — do not reorder.
     *
     * @author HuangCH
     */
    public enum eExpressionRule {
        /** 0 — no expression. */
        Exp_None,
        /** 1 — angry. */
        Exp_Angry,
        /** 2 — happy. */
        Exp_Happy,
        /** 3 — sad. */
        Exp_Sad,
        /** 4 — surprised (spelling kept for compatibility). */
        Exp_Surpprise
    }


    /**
     * Expression intensity buckets. Declaration order must match the engine's
     * integer flags (0..2) — do not reorder.
     *
     * @author HuangCH
     */
    public enum eExpDegreeRule {
        /** 0 — slight. */
        Exp_Slight,
        /** 1 — striking. */
        Exp_Striking,
        /** 2 — extreme. */
        Exp_Extreme
    }

    /**
     * Face pose (profile) types. Declaration order must match the engine's
     * integer flags (0..4) — do not reorder. ("Rrofile" spelling kept for
     * compatibility with existing callers.)
     *
     * @author HuangCH
     */
    public enum eRrofileRule {
        /** 0 — face straight-on. */
        Rrofile_Front,
        /** 1 — face turned left. */
        Rrofile_Left,
        /** 2 — face turned right. */
        Rrofile_Right,
        /** 3 — face tilted slightly up. */
        Rrofile_Up,
        /** 4 — face tilted slightly down. */
        Rrofile_Down
    }
}
